# problem1_3.py
import sys
import csv
def main():
    """Train a two-feature perceptron and log the weights after each pass.

    Usage: python3 problem1_3.py [input_file] [output_file]

    Reads integer rows ``x1,x2,y`` (y in {-1, +1}) from the input CSV and
    repeats full passes over the data, applying the perceptron update
    ``w += y * x`` (and ``b += y``) on every misclassified point, until a
    pass leaves the weights unchanged.  After each pass the current
    ``[w1, w2, b]`` is appended as a row to the output CSV.
    """
    if len(sys.argv) != 3:
        print("Usage: python3 problem1_3.py [input_file] [output_file]")
        return

    # sign(0) is -1, so a point lying exactly on the boundary counts as
    # misclassified for the positive class and still triggers an update.
    def sign(x):
        return -1 if x <= 0 else 1

    # Context managers guarantee both files are closed even if a row is
    # malformed mid-read (the original leaked the handles on that path).
    with open(sys.argv[1], 'rt', newline='') as ifile, \
         open(sys.argv[2], 'wt', newline='') as ofile:
        reader = csv.reader(ifile)
        writer = csv.writer(ofile)

        b = w1 = w2 = 0
        b_o = w1_o = w2_o = -1  # previous pass; forced unequal initially
        while (b, w1, w2) != (b_o, w1_o, w2_o):
            b_o, w1_o, w2_o = b, w1, w2
            ifile.seek(0)  # rewind so the reader replays the whole data set
            for row in reader:
                x1 = int(row[0])
                x2 = int(row[1])
                y = int(row[2])
                f = sign(b + w1 * x1 + w2 * x2)
                if y * f <= 0:  # misclassified (or on the boundary)
                    b += y
                    w1 += y * x1
                    w2 += y * x2
            writer.writerow([w1, w2, b])


if __name__ == '__main__':
    main()
# problem2_3.py
import sys
import csv
import numpy as np
def main():
    """Fit y = b0 + b1*x1 + b2*x2 by batch gradient descent, sweeping rates.

    Usage: python3 problem2_3.py [input_file] [output_file]

    Reads ``x1,x2,y`` rows from the input CSV, z-scores each feature
    (population std — np.std's default ddof=0), then runs batch gradient
    descent from zero weights for each learning rate in the sweep list
    (100 iterations each), plus one extra run with a hand-picked rate of 1
    for 20 iterations.  Each run appends ``[alpha, iterations, b0, b1, b2]``
    to the output CSV.
    """
    if len(sys.argv) != 3:
        print("Usage: python3 problem2_3.py [input_file] [output_file]")
        return

    x1 = []
    x2 = []
    y = []
    # Context manager closes the input even if a row is malformed
    # (the original leaked the handle on that path).
    with open(sys.argv[1], 'rt', newline='') as ifile:
        for row in csv.reader(ifile):
            x1.append(float(row[0]))
            x2.append(float(row[1]))
            y.append(float(row[2]))
    n = len(y)

    # Standardize the features so a single learning-rate sweep suits both
    # columns regardless of their original scales.
    x1 = (x1 - np.mean(x1)) / np.std(x1)
    x2 = (x2 - np.mean(x2)) / np.std(x2)

    with open(sys.argv[2], 'wt', newline='') as ofile:
        writer = csv.writer(ofile)
        for a in [0.001, 0.005, 0.01, 0.05, 0.1, 0.5, 1, 5, 10]:
            b = _descend(x1, x2, y, n, a, 100)
            writer.writerow([a, 100, b[0], b[1], b[2]])
        # Extra run with a chosen rate and a smaller iteration budget.
        a = 1
        b = _descend(x1, x2, y, n, a, 20)
        writer.writerow([a, 20, b[0], b[1], b[2]])


def _descend(x1, x2, y, n, a, iters):
    """Run `iters` batch gradient-descent steps with rate `a`.

    Starts from b = [0, 0, 0] and returns the final [b0, b1, b2].
    The gradient is accumulated over all n samples before each update.
    """
    b = [0, 0, 0]
    for _ in range(iters):
        s = [0, 0, 0]  # gradient accumulator (scaled by n)
        for k in range(n):
            d = b[0] + b[1] * x1[k] + b[2] * x2[k] - y[k]
            s[0] += d
            s[1] += d * x1[k]
            s[2] += d * x2[k]
        b[0] -= a * s[0] / n
        b[1] -= a * s[1] / n
        b[2] -= a * s[2] / n
    return b


if __name__ == '__main__':
    main()
5. Imagine you have a 6-class classification problem where the dataset contains 9 input features. You decide to build a classifier using a "mixture of mixtures", i.e. a Gaussian mixture model for each class-conditional likelihood $p(\mathbf{x} \mid \omega_i)$. Each mixture model uses 3 mixture components with diagonal covariance matrices. Calculate the total number of model parameters in the classifier (do not consider the priors).
Imagine that you are in class and have a sudden urge to gulp down a hot fudge sundae. Some part of you really wants to just grab a giant sundae and start eating it right this second. But then, another part of you realizes that a giant hot fudge sundae has lots and lots of calories in it, and makes you feel weak and even guilty for even considering eating something like this. Ultimately, you decide to wait until class...