the very first commit message
commit b7ae1a421a
.gitignore (vendored, Normal file, 1 line)
@@ -0,0 +1 @@
venv
README.md (Normal file, 46 lines)
@@ -0,0 +1,46 @@
```
pip install numpy tensorflow
```
`main.py` didn't run due to a `NumPy` import issue. The error is given below:
```
Traceback (most recent call last):
  File "/home/hdmtp/Image_recog/venv/lib/python3.10/site-packages/numpy/core/__init__.py", line 23, in <module>
    from . import multiarray
  File "/home/hdmtp/Image_recog/venv/lib/python3.10/site-packages/numpy/core/multiarray.py", line 10, in <module>
    from . import overrides
  File "/home/hdmtp/Image_recog/venv/lib/python3.10/site-packages/numpy/core/overrides.py", line 6, in <module>
    from numpy.core._multiarray_umath import (
ImportError: libz.so.1: cannot open shared object file: No such file or directory

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/home/hdmtp/Image_recog/main.py", line 1, in <module>
    import numpy as np
  File "/home/hdmtp/Image_recog/venv/lib/python3.10/site-packages/numpy/__init__.py", line 141, in <module>
    from . import core
  File "/home/hdmtp/Image_recog/venv/lib/python3.10/site-packages/numpy/core/__init__.py", line 49, in <module>
    raise ImportError(msg)
ImportError:

IMPORTANT: PLEASE READ THIS FOR ADVICE ON HOW TO SOLVE THIS ISSUE!

Importing the numpy C-extensions failed. This error can happen for
many reasons, often due to issues with your setup or how NumPy was
installed.

We have compiled some common reasons and troubleshooting tips at:

    https://numpy.org/devdocs/user/troubleshooting-importerror.html

Please note and check the following:

  * The Python version is: Python3.10 from "/home/hdmtp/Image_recog/venv/bin/python3"
  * The NumPy version is: "1.24.2"

and make sure that they are the versions you expect.
Please carefully study the documentation linked above for further help.

Original error was: libz.so.1: cannot open shared object file: No such file or directory
```
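Both tracebacks point to the same root cause: the system `zlib` runtime (`libz.so.1`) is not visible to the dynamic loader, so NumPy's compiled extension `numpy.core._multiarray_umath` cannot be loaded. Installing the distribution's zlib runtime package should resolve it (the exact package name depends on the OS, which this commit does not record). A small sanity check like the one below (a hypothetical snippet, not part of this commit) confirms that the library is found and that NumPy's C extensions import cleanly afterwards:

```
# Hypothetical sanity check (not part of this commit): verify that libz is
# visible to the loader and that NumPy's compiled extensions work.
import ctypes.util

import numpy as np

print(ctypes.util.find_library("z"))  # should print something like "libz.so.1", not None
print(np.__version__)                 # e.g. "1.24.2", as reported in the traceback above
print((np.arange(3) + 1).sum())       # exercises numpy.core._multiarray_umath; prints 6
```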
main.py (Normal file, 44 lines)
@@ -0,0 +1,44 @@
import numpy as np
from tensorflow import keras
from keras.constraints import maxnorm   # imported for later use; not used yet in this script
from keras.utils import np_utils

seed = 21   # random seed for reproducibility (defined but not yet applied, e.g. via np.random.seed(seed))

from keras.datasets import cifar10
'''
The CIFAR-10 dataset
(Canadian Institute for Advanced Research, 10 classes)
is a subset of the Tiny Images dataset and
consists of 60000 32x32 color images.
'''

# loading the data
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
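# Expected shapes returned by cifar10.load_data():
#   x_train: (50000, 32, 32, 3)   y_train: (50000, 1)
#   x_test:  (10000, 32, 32, 3)   y_test:  (10000, 1)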
# Normalize the inputs from 0-255 to between 0 and 1 by dividing by 255
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train = x_train / 255.0
x_test = x_test / 255.0

# One-hot encode outputs
'''
Another thing we need to do to get the data ready for the network is to one-hot encode the labels.
We won't go into the specifics of one-hot encoding here; for now, know that the integer class labels
can't be fed to the network as they are. They need to be encoded first, and one-hot encoding is the
standard representation for multi-class targets trained with categorical cross-entropy.
'''
y_train = np_utils.to_categorical(y_train)
y_test = np_utils.to_categorical(y_test)
class_num = y_test.shape[1]
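# For illustration, to_categorical turns integer labels into one-hot rows, e.g.
#   to_categorical([0, 2, 1], num_classes=3)
#     -> [[1., 0., 0.],
#         [0., 0., 1.],
#         [0., 1., 0.]]
# For CIFAR-10, class_num evaluates to 10.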
model = keras.Sequential()
# NOTE: keras.layers has no attributes named layer1/layer2/layer3; the three
# calls below are placeholders and raise AttributeError as written. A hedged
# working sketch follows this file listing.
model.add(keras.layers.layer1)
model.add(keras.layers.layer2)
model.add(keras.layers.layer3)

# 'val_accuracy' is not a metric name accepted by compile(); validation accuracy
# is reported automatically once validation data is passed to fit().
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

model.summary()   # summary() prints the architecture itself; no need to wrap it in print()
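For reference, here is a minimal sketch of what the placeholder layers and training setup could look like. This is an assumption for illustration only, since the commit does not define an architecture: a small Conv2D/Dense stack sized for CIFAR-10's 32x32x3 inputs and 10 classes.

```
# Minimal sketch, assuming a small CNN for CIFAR-10 (not the author's architecture).
import numpy as np
from tensorflow import keras

model = keras.Sequential([
    keras.layers.Conv2D(32, (3, 3), activation='relu', padding='same',
                        input_shape=(32, 32, 3)),
    keras.layers.MaxPooling2D((2, 2)),
    keras.layers.Flatten(),
    keras.layers.Dense(64, activation='relu'),
    keras.layers.Dense(10, activation='softmax'),  # 10 = class_num for CIFAR-10
])

model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()

# Smoke test on random data shaped like the normalized CIFAR-10 batches.
x_dummy = np.random.rand(4, 32, 32, 3).astype('float32')
y_dummy = keras.utils.to_categorical(np.random.randint(0, 10, size=(4,)), 10)
model.fit(x_dummy, y_dummy, epochs=1, verbose=0)
```

Swapping a stack like this in for the three placeholder `model.add` calls would let `model.compile` and `model.summary()` run end to end once the NumPy import issue above is resolved.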