From 8264de7fc54c9b566d48126d87b4d0dd1da9b84c Mon Sep 17 00:00:00 2001
From: Rinsa
Date: Sat, 27 Jun 2020 08:06:23 +0530
Subject: [PATCH] Renamed files, added task4 and task5

---
 .../daily tasks/Rinsa Fathima CM/day2_task.py |  14 --
 .../Rinsa Fathima CM/image/flower.jpeg        | Bin 0 -> 5160 bytes
 Tasks/daily tasks/Rinsa Fathima CM/task2.py   |  29 ++--
 Tasks/daily tasks/Rinsa Fathima CM/task3.py   |  21 +++
 Tasks/daily tasks/Rinsa Fathima CM/task4.py   | 142 +++++++++++++++++++
 Tasks/daily tasks/Rinsa Fathima CM/task5.py   |  31 ++++
 6 files changed, 205 insertions(+), 32 deletions(-)
 delete mode 100644 Tasks/daily tasks/Rinsa Fathima CM/day2_task.py
 create mode 100644 Tasks/daily tasks/Rinsa Fathima CM/image/flower.jpeg
 create mode 100644 Tasks/daily tasks/Rinsa Fathima CM/task3.py
 create mode 100644 Tasks/daily tasks/Rinsa Fathima CM/task4.py
 create mode 100644 Tasks/daily tasks/Rinsa Fathima CM/task5.py
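
Note: the renames in the subject line are visible in the blob hashes below:
task2.py now carries the old day2_task.py content (e13caa7) and task3.py
carries the old task2.py content (201d9d5). task4.py creates its CIFAR10
datasets with download=False, so it assumes the archive is already present
under ./data; on a fresh checkout it can be fetched once beforehand, e.g.:

    import torchvision
    torchvision.datasets.CIFAR10(root='./data', train=True, download=True)
    torchvision.datasets.CIFAR10(root='./data', train=False, download=True)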
diff --git a/Tasks/daily tasks/Rinsa Fathima CM/day2_task.py b/Tasks/daily tasks/Rinsa Fathima CM/day2_task.py
deleted file mode 100644
index e13caa7..0000000
--- a/Tasks/daily tasks/Rinsa Fathima CM/day2_task.py
+++ /dev/null
@@ -1,14 +0,0 @@
-import numpy as np
-import torch
-
-#creating numpy arrays
-a=np.random.randint(15,size=(5,3))
-b=np.random.randint(5,size=(3,4))
-
-#converting numpy arrays to torch tensors
-c=torch.from_numpy(a)
-d=torch.from_numpy(b)
-
-#multiplying torch tensors
-product=torch.mm(c,d)
-print(product)
diff --git a/Tasks/daily tasks/Rinsa Fathima CM/image/flower.jpeg b/Tasks/daily tasks/Rinsa Fathima CM/image/flower.jpeg
new file mode 100644
index 0000000000000000000000000000000000000000..fa0531da5be0604a1f683557aee7f33e1887626e
GIT binary patch
literal 5160
[base85 blob data omitted -- 5160-byte JPEG]

literal 0
HcmV?d00001

diff --git a/Tasks/daily tasks/Rinsa Fathima CM/task2.py b/Tasks/daily tasks/Rinsa Fathima CM/task2.py
index 201d9d5..e13caa7 100644
--- a/Tasks/daily tasks/Rinsa Fathima CM/task2.py
+++ b/Tasks/daily tasks/Rinsa Fathima CM/task2.py
@@ -1,21 +1,14 @@
+import numpy as np
 import torch
-import torch.nn as nn
 
-class Net(nn.Module):
-    def __init__(self):
-        super(Net,self).__init__()
-        self.input=nn.Linear(400,200)
-        self.hidden1=nn.Linear(200,100)
-        self.sigmoid=nn.Sigmoid()
-        self.hidden2=nn.Linear(100,50)
-        self.output=nn.Linear(50,25)
+#creating numpy arrays
+a=np.random.randint(15,size=(5,3))
+b=np.random.randint(5,size=(3,4))
 
-    def forward(self,x):
-        x=self.input(x)
-        x=self.hidden1(x)
-        x=self.sigmoid(x)
-        x=self.hidden2(x)
-        x=self.output(x)
-        return x
-model=Net()
-print(model)
\ No newline at end of file
+#converting numpy arrays to torch tensors
+c=torch.from_numpy(a)
+d=torch.from_numpy(b)
+
+#multiplying torch tensors
+product=torch.mm(c,d)
+print(product)
diff --git a/Tasks/daily tasks/Rinsa Fathima CM/task3.py b/Tasks/daily tasks/Rinsa Fathima CM/task3.py
new file mode 100644
index 0000000..201d9d5
--- /dev/null
+++ b/Tasks/daily tasks/Rinsa Fathima CM/task3.py
@@ -0,0 +1,21 @@
+import torch
+import torch.nn as nn
+
+class Net(nn.Module):
+    def __init__(self):
+        super(Net,self).__init__()
+        self.input=nn.Linear(400,200)
+        self.hidden1=nn.Linear(200,100)
+        self.sigmoid=nn.Sigmoid()
+        self.hidden2=nn.Linear(100,50)
+        self.output=nn.Linear(50,25)
+
+    def forward(self,x):
+        x=self.input(x)
+        x=self.hidden1(x)
+        x=self.sigmoid(x)
+        x=self.hidden2(x)
+        x=self.output(x)
+        return x
+model=Net()
+print(model)
\ No newline at end of file
diff --git a/Tasks/daily tasks/Rinsa Fathima CM/task4.py b/Tasks/daily tasks/Rinsa Fathima CM/task4.py
new file mode 100644
index 0000000..b872c5c
--- /dev/null
+++ b/Tasks/daily tasks/Rinsa Fathima CM/task4.py
@@ -0,0 +1,142 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+import torchvision
+import torchvision.transforms as transforms
+import torch.optim as optim
+
+transform = transforms.Compose(
+    [
+        transforms.ToTensor(),
+        transforms.Normalize(
+            (0.5, 0.5, 0.5),
+            (0.5, 0.5, 0.5)
+        )
+    ]
+)
+
+trainset = torchvision.datasets.CIFAR10(
+    root='./data',
+    train=True,
+    download=False,
+    transform=transform
+)
+
+testset = torchvision.datasets.CIFAR10(
+    root='./data',
+    train=False,
+    download=False,
+    transform=transform
+)
+
+trainloader = torch.utils.data.DataLoader(
+    trainset,
+    batch_size=4,
+    shuffle=True,
+    num_workers=2
+)
+
+testloader = torch.utils.data.DataLoader(
+    testset,
+    batch_size=4,
+    shuffle=False,
+    num_workers=2
+)
+
+classes = (
+    'plane', 'car', 'bird', 'cat',
+    'deer', 'dog', 'frog', 'horse', 'ship', 'truck'
+)
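+
+# Shape check (CIFAR-10 inputs are 3x32x32): conv1 (5x5) -> 6x28x28,
+# pool -> 6x14x14, conv2 (5x5) -> 16x10x10, pool -> 16x5x5, which is
+# why the flatten in forward() uses 16 * 5 * 5.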
+
+class Net(nn.Module):
+    def __init__(self):
+        super(Net, self).__init__()
+        self.conv1 = nn.Conv2d(3, 6, 5)
+        self.pool = nn.MaxPool2d(2, 2)
+        self.conv2 = nn.Conv2d(6, 16, 5)
+        self.fc1 = nn.Linear(16 * 5 * 5, 120)
+        self.fc2 = nn.Linear(120, 84)
+        self.fc3 = nn.Linear(84, 10)
+
+    def forward(self, x):
+        x = self.pool(F.relu(self.conv1(x)))
+        x = self.pool(F.relu(self.conv2(x)))
+        x = x.view(-1, 16 * 5 * 5)
+        x = F.relu(self.fc1(x))
+        x = F.relu(self.fc2(x))
+        x = self.fc3(x)
+        return x
+
+
+net = Net()
+
+loss_function = nn.CrossEntropyLoss()
+optimizer = optim.SGD(
+    net.parameters(),
+    lr=0.001
+)
+
+for epoch in range(2):
+    running_loss = 0.0
+    for i, data in enumerate(trainloader, 0):
+        # data = (inputs, labels)
+        inputs, labels = data
+        optimizer.zero_grad()
+
+        outputs = net(inputs)
+        loss = loss_function(outputs, labels)
+        loss.backward()
+        optimizer.step()
+
+        running_loss = running_loss + loss.item()
+        if i % 2000 == 1999:
+            print(
+                '[%d, %5d] loss: %.3f' %
+                (epoch + 1, i+1, running_loss/2000)
+            )
+            running_loss = 0.0
+print("voila")
+
+correct = 0
+total = 0
+with torch.no_grad():
+    for data in testloader:
+        images, labels = data
+        outputs = net(images)
+        _, predicted = torch.max(outputs.data, 1)
+        total += labels.size(0)
+        correct += (predicted == labels).sum().item()
+
+print('Accuracy of the network on the 10000 test images: %d %%' % (
+    100 * correct / total))
+
+'''
+original code:
+epochs=2 , batch_size=4 , lr=0.001 , loss=1.887 , accuracy=32%
+
+changing learning rate:
+lr=0.0001 , loss=2.299 , accuracy=10%
+lr=0.01 , loss=1.312 , accuracy=54%
+lr=0.1 , loss=1.961 , accuracy=24%
+
+changing batch size:
+batch_size=2 , loss=1.537 , accuracy=43%
+batch_size=1 , loss=1.368 , accuracy=51%
+batch_size=8 , loss=2.145 , accuracy=25%
+
+changing number of epochs:
+epochs=1 , loss=2.292 , accuracy=15%
+epochs=6 , loss=1.395 , accuracy=50%
+
+changing kernel size of conv2d:
+kernel_size=3, loss=1.80 , accuracy=35%
+
+changing output channels:
+output_channels=(10,20) , loss=1.183 , accuracy=34%
+output_channels=(6,10) , loss=1.189 , accuracy=33%
+
+'''
\ No newline at end of file
diff --git a/Tasks/daily tasks/Rinsa Fathima CM/task5.py b/Tasks/daily tasks/Rinsa Fathima CM/task5.py
new file mode 100644
index 0000000..b11c4f5
--- /dev/null
+++ b/Tasks/daily tasks/Rinsa Fathima CM/task5.py
@@ -0,0 +1,31 @@
+import torch
+from PIL import Image
+from torchvision import transforms
+import torchvision.transforms.functional as F
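+
+# Augmentation pipeline: resize the short side to 300, take a random 200x200
+# crop, jitter colour, rotate within +/-60 degrees and flip at random; the
+# final Normalize maps each channel to [-1, 1], so the to_pil_image() preview
+# below shows distorted colours (it assumes float input in [0, 1]).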
+
+transform = transforms.Compose([
+    transforms.Resize(300),
+    transforms.RandomCrop(200),
+    transforms.ColorJitter(brightness=0.7, contrast=0.3, saturation=0.3, hue=0.3),
+    transforms.RandomRotation((-60,60), resample=False, expand=False, center=None, fill=None),
+    transforms.RandomHorizontalFlip(),
+    transforms.RandomVerticalFlip(),
+    transforms.ToTensor(),
+    transforms.Normalize(
+        (0.5, 0.5, 0.5),
+        (0.5, 0.5, 0.5)
+    ),
+])
+
+path="image/flower.jpeg"
+img=Image.open(path)
+
+img = transform(img)
+
+a = F.to_pil_image(img)
+a.show()
\ No newline at end of file