If my code is like this:
from sklearn.model_selection import train_test_split
from keras.models import Sequential
from keras.layers import Dropout, Flatten, Dense, Conv2D, MaxPooling2D, BatchNormalization
from keras.callbacks import TensorBoard
from keras.callbacks import ModelCheckpoint
from keras.optimizers import Adam
from keras.optimizers import SGD
then it reports this error:
ImportError Traceback (most recent call last)
<ipython-input-13-65309f3d78a9> in <module>()
6 from keras.callbacks import TensorBoard
7 from keras.callbacks import ModelCheckpoint
----> 8 from keras.optimizers import Adam
9 from keras.optimizers import SGD
10
ImportError: cannot import name 'Adam'
If my code is like this instead:
from tensorflow.keras.optimizers import Adam
model.compile(loss='sparse_categorical_crossentropy',optimizer=Adam(),metrics=['acc'])
it reports the error:
~/anaconda3/lib/python3.6/site-packages/keras/optimizers.py in get(identifier)
else:
raise ValueError('Could not interpret optimizer identifier: {}'.format(identifier))
ValueError: Could not interpret optimizer identifier: <tensorflow.python.keras.optimizer_v2.adam.Adam object at 0x7fa9bd68c048>
The import statement looks fine:
from tensorflow.keras.optimizers import Adam
The compile method takes three parameters (loss, optimizer, and metrics). The optimizer can be a string. For example:
model.compile(loss='sparse_categorical_crossentropy',optimizer='adam',metrics=['acc'])
You can also specify Adam as a variable and use that variable as your optimizer:
example = Adam(learning_rate=0.1)
model.compile(loss='sparse_categorical_crossentropy',optimizer=example,metrics=['acc'])
The default values for Adam are here.
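For reference, a minimal sketch, assuming the whole script sticks to tf.keras; mixing imports from the standalone keras package with tensorflow.keras in one script is a common cause of the "Could not interpret optimizer identifier" error above:
import numpy as np
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.optimizers import Adam

# Toy model and data, just to show that compile() accepts the Adam instance.
model = Sequential([Dense(16, activation='relu', input_shape=(4,)),
                    Dense(3, activation='softmax')])
model.compile(loss='sparse_categorical_crossentropy',
              optimizer=Adam(learning_rate=0.001),
              metrics=['acc'])
x = np.random.rand(32, 4).astype('float32')
y = np.random.randint(0, 3, size=(32,))
model.fit(x, y, epochs=1, verbose=0)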
Related
I'm very new to git and GitHub, and I'm trying to include a repository from GitHub to learn some basics of Graph Convolutional Neural Networks. I used git init to initialize the folder on my PC and git submodule add https://github.com/vermaMachineLearning/keras-deep-graph-learning.git to add the repository as a subfolder, but when I try to import the package using the following code
import os, sys
sys.path.append(os.path.join(os.getcwd(), "keras-deep-graph-learning")) # Adding the submodule to the module search path
sys.path.append(os.path.join(os.getcwd(), "keras-deep-graph-learning/examples")) # Adding the submodule to the module search path
import numpy as np
from keras.layers import Dense, Activation, Dropout
from keras.models import Model, Sequential
from keras.regularizers import l2
from tensorflow.keras.optimizers import Adam
from keras_dgl.layers import GraphCNN
import keras.backend as K
from keras.utils import to_categorical
it gives me the following traceback:
ModuleNotFoundError Traceback (most recent call last)
~\AppData\Local\Temp/ipykernel_22224/914478576.py in <module>
7 from keras.regularizers import l2
8 from tensorflow.keras.optimizers import Adam
----> 9 from keras_dgl.layers import GraphCNN
10 import keras.backend as K
11 from keras.utils import to_categorical
~\python\GNN\keras-deep-graph-learning\keras_dgl\layers\__init__.py in <module>
----> 1 from .graph_cnn_layer import GraphCNN
2 from .multi_graph_cnn_layer import MultiGraphCNN
3 from .graph_attention_cnn_layer import GraphAttentionCNN
4 from .multi_graph_attention_cnn_layer import MultiGraphAttentionCNN
5 from .graph_convolutional_recurrent_layer import GraphConvLSTM
~\python\GNN\keras-deep-graph-learning\keras_dgl\layers\graph_cnn_layer.py in <module>
2 from keras import regularizers
3 import keras.backend as K
----> 4 from keras.engine.topology import Layer
5 import tensorflow as tf
6 from .graph_ops import graph_conv_op
ModuleNotFoundError: No module named 'keras.engine.topology'
My experience with deep learning and GitHub is very limited. Could anyone please kindly let me know if I made any silly mistakes? Thanks a lot for your great support and help.
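A possible workaround, assuming the submodule only needs the base Layer class: keras.engine.topology was removed in newer Keras releases, so the import in keras_dgl/layers/graph_cnn_layer.py can be made version-tolerant, for example:
# Sketch of a patch to keras_dgl/layers/graph_cnn_layer.py; it assumes the
# file only needs the base Layer class from the removed keras.engine.topology.
try:
    from keras.engine.topology import Layer   # old standalone Keras releases
except ImportError:
    from tensorflow.keras.layers import Layer  # TF 2.x / newer Keras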
I've tried to implement QAT (quantization-aware training) in my model training script. I'm using the functional API for model creation.
Steps I followed to implement the QAT API:
Built the model architecture
Inserted the appropriate quantize_model function
Trained the model
Here is the code snippet for more clarity:
from tensorflow.keras import regularizers
import numpy as np
from tensorflow.keras.models import Model
from tensorflow.keras.layers import TimeDistributed,SeparableConv1D,Dense,Embedding,Conv1D
from tensorflow.keras.layers import Input, Dropout, MaxPooling1D,Flatten, concatenate, BatchNormalization, AveragePooling1D
from tensorflow.keras.layers import LeakyReLU
from tensorflow.keras.utils import Progbar
from tensorflow.keras.constraints import non_neg , min_max_norm, max_norm , unit_norm
from tensorflow.keras.optimizers import Nadam
import tensorflow_model_optimization as tfmot
from tensorflow_model_optimization.python.core.quantization.keras import quantize
words_input = Input(shape=(None,),dtype='int32',name='words_input')
words = Embedding(input_dim=wordEmbeddings.shape[0], output_dim=wordEmbeddings.shape[1], weights=[wordEmbeddings], trainable=False)(words_input)
............
convd_output= SeparableConv1D(kernel_size=4, filters=128, padding='same', activation='relu', strides=1, depth_multiplier=3, bias_regularizer=regularizers.l2(0.0001), kernel_constraint =min_max_norm(0.4,0.9))(output)
convd_output=AveragePooling1D(pool_size=2, strides=1, padding='same')(convd_output)
convd_output=Dropout(0.1)(convd_output)
flatten_output=TimeDistributed(Flatten())(convd_output)
output = TimeDistributed(Dense(len(label2Idx), activation='softmax'))(flatten_output)
inputs_list=[words_input, casing_input,newline_input]
model = Model(*inputs_list, output)
q_aware_model = tfmot.quantization.keras.quantize_model(model)
opt = Nadam(lr=0.0005)
q_aware_model.compile(loss='sparse_categorical_crossentropy', optimizer=opt,metrics = ['sparse_categorical_accuracy'])
print(q_aware_model.summary())
Version details
tensorflow==2.3.0
tensorflow-model-optimization==0.5.0
Issue: I'm using the functional API, which is supported by the QAT API, but I'm still getting a ValueError from quantize_model:
ValueError: `to_quantize` can only either be a tf.keras Sequential or Functional model
I couldn't figure out the issue. It would be helpful if someone could help me get past this.
Thanks in advance
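One thing worth double-checking, as an assumption rather than a confirmed diagnosis: tfmot raises this ValueError when the model is not recognized as a graph-built Functional model, and Model(*inputs_list, output) passes the inputs as separate positional arguments instead of using the documented inputs/outputs keywords. A minimal sketch of a Functional model that quantize_model accepts:
import tensorflow_model_optimization as tfmot
from tensorflow.keras.layers import Input, Dense
from tensorflow.keras.models import Model

# Functional model built with explicit keyword arguments; when there are
# several inputs, pass them as a list to inputs= instead of unpacking them
# as positional arguments.
inp = Input(shape=(8,), name='inp')
x = Dense(16, activation='relu')(inp)
out = Dense(3, activation='softmax')(x)
model = Model(inputs=inp, outputs=out)

q_aware_model = tfmot.quantization.keras.quantize_model(model)
q_aware_model.compile(loss='sparse_categorical_crossentropy',
                      optimizer='adam',
                      metrics=['sparse_categorical_accuracy'])
q_aware_model.summary()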
import numpy as np
import pandas as pd
import os
import tensorflow as tf
import keras
from keras.applications import VGG19
from keras.models import Sequential
from keras.layers import Dense, Dropout
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
import cv2
Please help me resolve the error I'm getting on Colab. I'm new to coding and have very little knowledge about it. The error is: ImportError: cannot import name 'VGG19' from 'keras.applications' (/usr/local/lib/python3.7/dist-packages/keras/applications/__init__.py).
The right syntax is:
from keras.applications.vgg19 import VGG19
You omitted the .vgg19 part of the module path.
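A minimal sketch showing both import paths, assuming either the standalone keras package or tf.keras is installed:
from keras.applications.vgg19 import VGG19          # standalone Keras
# from tensorflow.keras.applications import VGG19   # tf.keras equivalent

# Quick check that the import resolves and the model builds.
model = VGG19(weights='imagenet', include_top=False, input_shape=(224, 224, 3))
model.summary()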
I made a model with VGG19. The image size for input_shape was 220, and then I trained the model on 2 different image types:
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import Sequential
from tensorflow.keras.layers import Dense, Activation, Dropout, Flatten, Conv2D, MaxPooling2D
from tensorflow.keras.optimizers import RMSprop
from tensorflow.keras.applications import VGG19
modelVGG19_4 = tf.keras.Sequential()
modelVGG19_4.add(tf.keras.applications.VGG19(include_top=False, weights='imagenet',
                                             pooling='avg', input_shape=(220, 220, 3)))
modelVGG19_4.summary()
modelVGG19_4.add(tf.keras.layers.Dense(2, activation="softmax"))
opt = tf.keras.optimizers.SGD(0.004)
modelVGG19_4.compile(loss='categorical_crossentropy',
                     optimizer=opt,
                     metrics=['accuracy'])
modelVGG19_4.summary()
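The training call isn't shown above; a rough sketch of how such a two-class model could be fed, assuming an image directory with one subfolder per class (the paths below are hypothetical):
from tensorflow.keras.preprocessing.image import ImageDataGenerator

# 'data/train' is a hypothetical directory with one subfolder per class.
datagen = ImageDataGenerator(rescale=1. / 255, validation_split=0.2)
train_gen = datagen.flow_from_directory('data/train', target_size=(220, 220),
                                        batch_size=32, class_mode='categorical',
                                        subset='training')
val_gen = datagen.flow_from_directory('data/train', target_size=(220, 220),
                                      batch_size=32, class_mode='categorical',
                                      subset='validation')
modelVGG19_4.fit(train_gen, validation_data=val_gen, epochs=5)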
I was testing a TensorFlow model in Postman that uses https://tfhub.dev/google/universal-sentence-encoder-multilingual/3 from tensorflow-hub. It worked perfectly in a Jupyter notebook without any error, but I encountered this error in Postman after sending a POST request that calls the predict method.
Error:
"error": "{{function_node __inference_signature_wrapper_133703}} {{function_node __inference_signature_wrapper_133703}} {{function_node __inference__wrapped_model_95698}} {{function_node __inference__wrapped_model_95698}} {{function_node __inference_restored_function_body_51031}} {{function_node __inference_restored_function_body_51031}} [_Derived_]{{function_node __inference___call___6286}} {{function_node __inference___call___6286}} Op type not registered \'SentencepieceOp\' in binary running on 329ddc874964. Make sure the Op and Kernel are registered in the binary running in this process. Note that if you are loading a saved graph which used ops from tf.contrib, accessing (e.g.) `tf.contrib.resampler` should be done before importing the graph, as contrib ops are lazily registered when the module is first accessed.\n\t [[{{node StatefulPartitionedCall}}]]\n\t [[StatefulPartitionedCall]]\n\t [[sequential/keras_layer/StatefulPartitionedCall]]\n\t [[StatefulPartitionedCall]]\n\t [[StatefulPartitionedCall]]"
with a 404 Not Found status.
and this is my model:
import tensorflow as tf
import numpy as np
import tensorflow_text
import pandas as pd
import random
import tensorflow_hub as hub
from tensorflow import keras
from tensorflow.keras import layers
from tensorflow.keras import losses
from tensorflow.keras import preprocessing
from tensorflow.keras.layers.experimental.preprocessing import TextVectorization
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import LSTM, Dense, Dropout
from tensorflow.keras.layers import SpatialDropout1D
from tensorflow.keras.layers import Embedding
from tensorflow.keras.preprocessing.text import Tokenizer
from tensorflow.keras.preprocessing.sequence import pad_sequences
module_url = "https://tfhub.dev/google/universal-sentence-encoder-multilingual/3"
# Import the Universal Sentence Encoder's TF Hub module
hub_layer = hub.KerasLayer(module_url, input_shape=[], dtype=tf.string, trainable=True)
#some data preprocessing
opt = keras.optimizers.Adam(learning_rate= 0.001)
model = tf.keras.Sequential()
model.add(hub_layer)
model.add(tf.keras.layers.Dense(32, activation='relu'))
model.add(tf.keras.layers.Dense(16, activation='relu'))
model.add(tf.keras.layers.Dense(1,activation='sigmoid'))
model.layers[0].trainable = False
model.compile(loss='binary_crossentropy',optimizer=opt, metrics=['accuracy'])
model.summary()
history = model.fit(np.array(tweet), np.array(sentiment),
                    validation_split=0.2, epochs=5, batch_size=32)
This is the request in Postman, using localhost:.../arabtextclasstfhubtest:predict:
{
  "signature_name": "serving_default",
  "inputs": {
    "keras_layer_input": ["كلامك جميل ورائع"]
  }
}
I would like to know whether it's a bug in tensorflow-hub or how to fix this problem.
Thank you!
It appears you are exporting a SavedModel to a server binary that answers the Postman requests. That server binary needs to link in the 'SentencepieceOp' from tensorflow_text, because your SavedModel uses it (as it should).
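Assuming the serving process is a Python one, a minimal sketch is to import tensorflow_text before loading the SavedModel so the Sentencepiece ops get registered (a stock TensorFlow Serving binary would instead need the op compiled into it):
import tensorflow as tf
import tensorflow_text  # noqa: F401 -- importing registers the Sentencepiece ops

# '/path/to/exported_model' is a hypothetical path to the exported SavedModel.
model = tf.saved_model.load('/path/to/exported_model')
infer = model.signatures['serving_default']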
I am trying to use the same code as on this page, but I am getting an error in the middle of the code.
AttributeError Traceback (most recent call last)
<ipython-input-34-9ff70788070d> in <module>()
----> 1 model = Sequential()
2 model.add(LSTM(units=50, return_sequences=True,input_shape=(x_train.shape[1],1)))
3 model.add(LSTM(units=50, return_sequences=False))
4 model.add(Dense(units=25))
5 model.add(Dense(units=1))
1 frames
/usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py in get_uid(prefix)
66 """
67 global _GRAPH_UID_DICTS
---> 68 graph = tf.get_default_graph()
69 if graph not in _GRAPH_UID_DICTS:
70 _GRAPH_UID_DICTS[graph] = defaultdict(int)
AttributeError: module 'tensorflow' has no attribute 'get_default_graph'
Here is my import list:
#Import the libraries
from tensorflow.keras import backend as K
from tensorflow.keras.models import Sequential, load_model
from tensorflow.keras.layers import LSTM, Dense, RepeatVector, Masking, TimeDistributed
from tensorflow.keras.utils import plot_model
import quandl
import numpy as np
from sklearn.linear_model import LinearRegression
from sklearn.svm import SVR
import pandas_datareader as web
from sklearn.model_selection import train_test_split
import math
from keras.models import Sequential
from keras.layers import Dense, LSTM
from sklearn.preprocessing import MinMaxScaler
#import tensorflow as tf
#newinv=inventory+str(add)
from tensorflow.keras.layers import Embedding
from matplotlib import pyplot as plt
Update: After editing the code based on Giorgos' answer, now I get this error:
NameError Traceback (most recent call last)
<ipython-input-20-9ff70788070d> in <module>()
----> 1 model = Sequential()
2 model.add(LSTM(units=50, return_sequences=True,input_shape=(x_train.shape[1],1)))
3 model.add(LSTM(units=50, return_sequences=False))
4 model.add(Dense(units=25))
5 model.add(Dense(units=1))
NameError: name 'Sequential' is not defined
Here is my import list:
import math
import pandas_datareader as web
import numpy as np
import pandas as pd
from sklearn.preprocessing import MinMaxScaler
import quandl
import tensorflow as tf
model = tf.keras.Sequential()
from keras.layers import Dense, LSTM
import matplotlib.pyplot as plt
plt.style.use('fivethirtyeight')
Here is where I am getting the error:
#Build the LSTM network model
model = Sequential()
model.add(LSTM(units=50, return_sequences=True,input_shape=(x_train.shape[1],1)))
model.add(LSTM(units=50, return_sequences=False))
model.add(Dense(units=25))
model.add(Dense(units=1))
If you are using tf.keras, instead of
from keras.models import Sequential
from keras.layers import Dense, LSTM
use the following:
import tensorflow as tf
model = tf.keras.Sequential()
model.add(tf.keras.layers.LSTM(units=50, return_sequences=True,input_shape=(x_train.shape[1],1)))
model.add(tf.keras.layers.LSTM(units=50, return_sequences=False))
model.add(tf.keras.layers.Dense(units=25))
model.add(tf.keras.layers.Dense(units=1))
Also make sure to remove your older import from keras.models import Sequential so that Sequential() is not overwritten in the namespace. The same applies to from keras.layers import Dense, LSTM.
There might be some incompatibility between your Keras and TensorFlow versions. What did the trick for me was uninstalling Keras with sudo pip uninstall keras and re-installing it.
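A quick sanity check for such a mismatch, as a simple sketch:
# Print the installed versions; the get_default_graph error above typically
# appears when a standalone Keras 2.x release is paired with TensorFlow 2.x.
import tensorflow as tf
import keras

print('tensorflow:', tf.__version__)
print('keras:', keras.__version__)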