<?xml version="1.0"?>
<feed xmlns="http://www.w3.org/2005/Atom" xml:lang="et">
	<id>http://courses.cs.taltech.ee/w/api.php?action=feedcontributions&amp;feedformat=atom&amp;user=Kairit</id>
	<title>Kursused - User contributions [et]</title>
	<link rel="self" type="application/atom+xml" href="http://courses.cs.taltech.ee/w/api.php?action=feedcontributions&amp;feedformat=atom&amp;user=Kairit"/>
	<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/pages/Eri:Kaast%C3%B6%C3%B6/Kairit"/>
	<updated>2026-04-30T08:24:44Z</updated>
	<subtitle>User contributions</subtitle>
	<generator>MediaWiki 1.35.9</generator>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=304</id>
		<title>Fail:Ranking.pdf</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=304"/>
		<updated>2014-06-13T12:53:07Z</updated>

		<summary type="html">&lt;p&gt;Kairit: Kairit laadis üles faili &amp;amp;quot;Pilt:Ranking.pdf&amp;amp;quot; uue versiooni&lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=303</id>
		<title>Fail:Ranking.pdf</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=303"/>
		<updated>2014-06-10T21:19:10Z</updated>

		<summary type="html">&lt;p&gt;Kairit: Kairit laadis üles faili &amp;amp;quot;Pilt:Ranking.pdf&amp;amp;quot; uue versiooni&lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=302</id>
		<title>Fail:Ranking.pdf</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=302"/>
		<updated>2014-06-10T20:03:23Z</updated>

		<summary type="html">&lt;p&gt;Kairit: Kairit laadis üles faili &amp;amp;quot;Pilt:Ranking.pdf&amp;amp;quot; uue versiooni&lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=301</id>
		<title>Fail:Ranking.pdf</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=301"/>
		<updated>2014-06-10T20:03:01Z</updated>

		<summary type="html">&lt;p&gt;Kairit: Kairit laadis üles faili &amp;amp;quot;Pilt:Ranking.pdf&amp;amp;quot; uue versiooni&lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=300</id>
		<title>Fail:Ranking.pdf</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=300"/>
		<updated>2014-06-10T19:48:12Z</updated>

		<summary type="html">&lt;p&gt;Kairit: Kairit laadis üles faili &amp;amp;quot;Pilt:Ranking.pdf&amp;amp;quot; uue versiooni&lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=299</id>
		<title>Fail:Ranking.pdf</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=299"/>
		<updated>2014-06-09T21:36:22Z</updated>

		<summary type="html">&lt;p&gt;Kairit: Kairit laadis üles faili &amp;amp;quot;Pilt:Ranking.pdf&amp;amp;quot; uue versiooni&lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=298</id>
		<title>Fail:Ranking.pdf</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=298"/>
		<updated>2014-06-09T19:11:39Z</updated>

		<summary type="html">&lt;p&gt;Kairit: Kairit laadis üles faili &amp;amp;quot;Pilt:Ranking.pdf&amp;amp;quot; uue versiooni&lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=297</id>
		<title>Fail:Ranking.pdf</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=297"/>
		<updated>2014-06-09T15:50:15Z</updated>

		<summary type="html">&lt;p&gt;Kairit: Kairit laadis üles faili &amp;amp;quot;Pilt:Ranking.pdf&amp;amp;quot; uue versiooni&lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=296</id>
		<title>Fail:Ranking.pdf</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=296"/>
		<updated>2014-06-09T15:32:08Z</updated>

		<summary type="html">&lt;p&gt;Kairit: Kairit laadis üles faili &amp;amp;quot;Pilt:Ranking.pdf&amp;amp;quot; uue versiooni&lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=295</id>
		<title>Fail:Ranking.pdf</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=295"/>
		<updated>2014-06-09T15:18:50Z</updated>

		<summary type="html">&lt;p&gt;Kairit: Kairit laadis üles faili &amp;amp;quot;Pilt:Ranking.pdf&amp;amp;quot; uue versiooni&lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=294</id>
		<title>Machine learning</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=294"/>
		<updated>2014-06-05T13:00:15Z</updated>

		<summary type="html">&lt;p&gt;Kairit: /* Lecture 11: Dimensionality reduction - PCA */&lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;br /&gt;
Spring 2013/2014&lt;br /&gt;
&lt;br /&gt;
ITI8565: Machine learning&lt;br /&gt;
&lt;br /&gt;
Taught by: Kairit Sirts&lt;br /&gt;
&lt;br /&gt;
EAP: 6.0&lt;br /&gt;
&lt;br /&gt;
Time and place: Fridays&lt;br /&gt;
  Lectures: 16:00-17:30  X-406&lt;br /&gt;
  Labs: 17:45-19:15  X-412&lt;br /&gt;
&lt;br /&gt;
  Consultation:&lt;br /&gt;
  30.05.2014 at 15:00 in ICT-411&lt;br /&gt;
&lt;br /&gt;
  Exams: &lt;br /&gt;
  06.06.2014 at 16:00 in ICT-411&lt;br /&gt;
  13.06.2014 at 16:00 in ICT-411&lt;br /&gt;
&lt;br /&gt;
  Additional exam: &lt;br /&gt;
  19.06.2014 at 18:00 in ICT-411&lt;br /&gt;
&lt;br /&gt;
Additional information: sirts@ioc.ee, juhan.ernits@ttu.ee&lt;br /&gt;
&lt;br /&gt;
Skype: kairit.sirts&lt;br /&gt;
&lt;br /&gt;
The course is organised by [http://cs.ttu.ee the Department of Computer Science]. The course is supported by [http://studyitin.ee/ IT Academy].&lt;br /&gt;
&lt;br /&gt;
Students should also subscribe to the [http://lists.ttu.ee/mailman/listinfo/machine-learning machine learning mailing list]. &lt;br /&gt;
The list is used to share information about the course this semester, as well as about other machine learning related events at TUT (also in the future).&lt;br /&gt;
&lt;br /&gt;
Homework rankings based on results (just for fun): [[Meedia:Ranking.pdf|Ranking]]&lt;br /&gt;
&lt;br /&gt;
No lecture on 18.04.2014. Instead, we will have a joint session for solving homework problems on Thursday 17.04, starting at 14:00 in ICT-411.&lt;br /&gt;
&lt;br /&gt;
== Assignments ==&lt;br /&gt;
[[Media:Hw1.pdf|First homework]] about decision trees is open in moodle. To submit, you have to register [https://moodle.e-ope.ee/course/view.php?id=6504 for the course].&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw2.pdf|Second homework]] about KNN and K-means is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw3.pdf|Third homework]] about neural networks is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[https://www.dropbox.com/sh/50sioj7j8z7rwfn/s_iLJ6VlA0 Data] for the third homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw4.pdf|Fourth homework]] about linear and logistic regression is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Sbp.txt|Data]] for the fourth homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw5.pdf|Fifth homework]] about naive Bayes is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Spambase.txt|Data]] for the fifth homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw6.pdf|Sixth homework]] about support vector machines is open in moodle.&lt;br /&gt;
&lt;br /&gt;
== Lecture 1: Introduction, decision trees ==&lt;br /&gt;
[[Media:lecture1.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Media:Dt_example.pdf|Example made in class]] - When to play tennis?&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch01.pdf Reading] - also contains the full algorithm for decision tree learning with the divide-and-conquer strategy.&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 2: K nearest neighbours ==&lt;br /&gt;
[[Meedia:Lecture2.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 3: K-means clustering, MLE principle ==&lt;br /&gt;
[[Meedia:Lecture3.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading I]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch13.pdf Reading II]&lt;br /&gt;
&lt;br /&gt;
== Lecture 4: Gaussian Mixture Model, EM algorithm ==&lt;br /&gt;
[[Meedia:Lecture4.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch14.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 5: History of neural networks, perceptron ==&lt;br /&gt;
[[Meedia:Lecture5.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch03.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 6: Artificial neural networks ==&lt;br /&gt;
[[Meedia:Lecture6.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Bp_math.pdf|Backpropagation notes]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch08.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 7: Linear regression ==&lt;br /&gt;
[[Meedia:Lecture7.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 8: Logistic regression ==&lt;br /&gt;
[[Meedia:Lecture8.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 9: Naive Bayes, maximum entropy model ==&lt;br /&gt;
[[Meedia:Lecture9.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes2.pdf Reading about Naive Bayes, section 2, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.jhu.edu/~jason/tutorials/loglin/#1 Tutorial about log-linear modeling by Jason Eisner]&lt;br /&gt;
&lt;br /&gt;
== Lecture 10: Sequence modeling ==&lt;br /&gt;
[[Meedia:Lecture10.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.ubc.ca/~murphyk/Bayes/rabiner.pdf Reading] The classic paper on HMMs&lt;br /&gt;
&lt;br /&gt;
== Lecture 11: Dimensionality reduction - PCA ==&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.princeton.edu/picasso/mats/PCA-Tutorial-Intuition_jp.pdf Tutorial on PCA]&lt;br /&gt;
&lt;br /&gt;
== Lecture 12: Support vector machines ==&lt;br /&gt;
[[Meedia:Lecture12.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 1-4, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Lecture 13: SVM and kernels ==&lt;br /&gt;
[[Meedia:Lecture13.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 5-8, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Lecture 14: Kernelized methods, Gaussian processes ==&lt;br /&gt;
[[Meedia:Lecture14.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 15: Process mining. The alpha algorithm ==&lt;br /&gt;
[http://courses.cs.ttu.ee/w/images/e/e3/Masinõpe15.pdf Slides]&lt;br /&gt;
&lt;br /&gt;
[http://www.processmining.org/_media/processminingbook/process_mining_chapter_05_process_discovery.pdf The alpha algorithm slides from Processmining.org]&lt;br /&gt;
&lt;br /&gt;
== Additional links ==&lt;br /&gt;
[[Meedia:Latex_example.pdf|Latex example]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Latex_example.tex|Latex example code]]&lt;br /&gt;
&lt;br /&gt;
[http://www.maths.tcd.ie/~dwilkins/LaTeXPrimer/ Latex tutorial]&lt;br /&gt;
&lt;br /&gt;
[http://arkitus.com/patterns-for-research-in-machine-learning/ Tips for scientific programming]&lt;br /&gt;
&lt;br /&gt;
== Exam ==&lt;br /&gt;
[[Meedia:SampleExam2.pdf|Example exam questions]]&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=293</id>
		<title>Machine learning</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=293"/>
		<updated>2014-06-05T12:59:53Z</updated>

		<summary type="html">&lt;p&gt;Kairit: &lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;br /&gt;
Spring 2013/2014&lt;br /&gt;
&lt;br /&gt;
ITI8565: Machine learning&lt;br /&gt;
&lt;br /&gt;
Taught by: Kairit Sirts&lt;br /&gt;
&lt;br /&gt;
EAP: 6.0&lt;br /&gt;
&lt;br /&gt;
Time and place: Fridays&lt;br /&gt;
  Lectures: 16:00-17:30  X-406&lt;br /&gt;
  Labs: 17:45-19:15  X-412&lt;br /&gt;
&lt;br /&gt;
  Consultation:&lt;br /&gt;
  30.05.2014 at 15:00 in ICT-411&lt;br /&gt;
&lt;br /&gt;
  Exams: &lt;br /&gt;
  06.06.2014 at 16:00 in ICT-411&lt;br /&gt;
  13.06.2014 at 16:00 in ICT-411&lt;br /&gt;
&lt;br /&gt;
  Additional exam: &lt;br /&gt;
  19.06.2014 at 18:00 in ICT-411&lt;br /&gt;
&lt;br /&gt;
Additional information: sirts@ioc.ee, juhan.ernits@ttu.ee&lt;br /&gt;
&lt;br /&gt;
Skype: kairit.sirts&lt;br /&gt;
&lt;br /&gt;
The course is organised by [http://cs.ttu.ee the Department of Computer Science]. The course is supported by [http://studyitin.ee/ IT Academy].&lt;br /&gt;
&lt;br /&gt;
Students should also subscribe to the [http://lists.ttu.ee/mailman/listinfo/machine-learning machine learning mailing list]. &lt;br /&gt;
The list is used to share information about the course this semester, as well as about other machine learning related events at TUT (also in the future).&lt;br /&gt;
&lt;br /&gt;
Homework rankings based on results (just for fun): [[Meedia:Ranking.pdf|Ranking]]&lt;br /&gt;
&lt;br /&gt;
No lecture on 18.04.2014. Instead, we will have a joint session for solving homework problems on Thursday 17.04, starting at 14:00 in ICT-411.&lt;br /&gt;
&lt;br /&gt;
== Assignments ==&lt;br /&gt;
[[Media:Hw1.pdf|First homework]] about decision trees is open in moodle. To submit, you have to register [https://moodle.e-ope.ee/course/view.php?id=6504 for the course].&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw2.pdf|Second homework]] about KNN and K-means is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw3.pdf|Third homework]] about neural networks is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[https://www.dropbox.com/sh/50sioj7j8z7rwfn/s_iLJ6VlA0 Data] for the third homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw4.pdf|Fourth homework]] about linear and logistic regression is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Sbp.txt|Data]] for the fourth homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw5.pdf|Fifth homework]] about naive Bayes is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Spambase.txt|Data]] for the fifth homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw6.pdf|Sixth homework]] about support vector machines is open in moodle.&lt;br /&gt;
&lt;br /&gt;
== Lecture 1: Introduction, decision trees ==&lt;br /&gt;
[[Media:lecture1.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Media:Dt_example.pdf|Example made in class]] - When to play tennis?&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch01.pdf Reading] - also contains the full algorithm for decision tree learning with the divide-and-conquer strategy.&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 2: K nearest neighbours ==&lt;br /&gt;
[[Meedia:Lecture2.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 3: K-means clustering, MLE principle ==&lt;br /&gt;
[[Meedia:Lecture3.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading I]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch13.pdf Reading II]&lt;br /&gt;
&lt;br /&gt;
== Lecture 4: Gaussian Mixture Model, EM algorithm ==&lt;br /&gt;
[[Meedia:Lecture4.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch14.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 5: History of neural networks, perceptron ==&lt;br /&gt;
[[Meedia:Lecture5.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch03.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 6: Artificial neural networks ==&lt;br /&gt;
[[Meedia:Lecture6.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Bp_math.pdf|Backpropagation notes]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch08.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 7: Linear regression ==&lt;br /&gt;
[[Meedia:Lecture7.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 8: Logistic regression ==&lt;br /&gt;
[[Meedia:Lecture8.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 9: Naive Bayes, maximum entropy model ==&lt;br /&gt;
[[Meedia:Lecture9.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes2.pdf Reading about Naive Bayes, section 2, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.jhu.edu/~jason/tutorials/loglin/#1 Tutorial about log-linear modeling by Jason Eisner]&lt;br /&gt;
&lt;br /&gt;
== Lecture 10: Sequence modeling ==&lt;br /&gt;
[[Meedia:Lecture10.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.ubc.ca/~murphyk/Bayes/rabiner.pdf Reading] The classic paper on HMMs&lt;br /&gt;
&lt;br /&gt;
== Lecture 11: Dimensionality reduction - PCA ==&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.princeton.edu/picasso/mats/PCA-Tutorial-Intuition_jp.pdf Tutorial on PCA]&lt;br /&gt;
[http://www.ee.columbia.edu/~dpwe/e6820/papers/HyvO00-icatut.pdf Tutorial on ICA]&lt;br /&gt;
&lt;br /&gt;
== Lecture 12: Support vector machines ==&lt;br /&gt;
[[Meedia:Lecture12.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 1-4, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Lecture 13: SVM and kernels ==&lt;br /&gt;
[[Meedia:Lecture13.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 5-8, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Lecture 14: Kernelized methods, Gaussian processes ==&lt;br /&gt;
[[Meedia:Lecture14.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 15: Process mining. The alpha algorithm ==&lt;br /&gt;
[http://courses.cs.ttu.ee/w/images/e/e3/Masinõpe15.pdf Slides]&lt;br /&gt;
&lt;br /&gt;
[http://www.processmining.org/_media/processminingbook/process_mining_chapter_05_process_discovery.pdf The alpha algorithm slides from Processmining.org]&lt;br /&gt;
&lt;br /&gt;
== Additional links ==&lt;br /&gt;
[[Meedia:Latex_example.pdf|Latex example]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Latex_example.tex|Latex example code]]&lt;br /&gt;
&lt;br /&gt;
[http://www.maths.tcd.ie/~dwilkins/LaTeXPrimer/ Latex tutorial]&lt;br /&gt;
&lt;br /&gt;
[http://arkitus.com/patterns-for-research-in-machine-learning/ Tips for scientific programming]&lt;br /&gt;
&lt;br /&gt;
== Exam ==&lt;br /&gt;
[[Meedia:SampleExam2.pdf|Example exam questions]]&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=292</id>
		<title>Fail:Ranking.pdf</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=292"/>
		<updated>2014-06-02T21:02:06Z</updated>

		<summary type="html">&lt;p&gt;Kairit: Kairit laadis üles faili &amp;amp;quot;Pilt:Ranking.pdf&amp;amp;quot; uue versiooni&lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:SampleExam2.pdf&amp;diff=291</id>
		<title>Fail:SampleExam2.pdf</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:SampleExam2.pdf&amp;diff=291"/>
		<updated>2014-06-02T18:08:17Z</updated>

		<summary type="html">&lt;p&gt;Kairit: &lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=290</id>
		<title>Machine learning</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=290"/>
		<updated>2014-06-02T18:07:57Z</updated>

		<summary type="html">&lt;p&gt;Kairit: &lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;br /&gt;
Spring 2013/2014&lt;br /&gt;
&lt;br /&gt;
ITI8565: Machine learning&lt;br /&gt;
&lt;br /&gt;
Taught by: Kairit Sirts&lt;br /&gt;
&lt;br /&gt;
EAP: 6.0&lt;br /&gt;
&lt;br /&gt;
Time and place: Fridays&lt;br /&gt;
  Lectures: 16:00-17:30  X-406&lt;br /&gt;
  Labs: 17:45-19:15  X-412&lt;br /&gt;
&lt;br /&gt;
  Consultation:&lt;br /&gt;
  30.05.2014 at 15:00 in ICT-411&lt;br /&gt;
&lt;br /&gt;
  Exams: &lt;br /&gt;
  06.06.2014 at 16:00 in ICT-411&lt;br /&gt;
  13.06.2014 at 16:00 in ICT-411&lt;br /&gt;
&lt;br /&gt;
  Additional exam: &lt;br /&gt;
  19.06.2014 at 18:00 in ICT-411&lt;br /&gt;
&lt;br /&gt;
Additional information: sirts@ioc.ee, juhan.ernits@ttu.ee&lt;br /&gt;
&lt;br /&gt;
Skype: kairit.sirts&lt;br /&gt;
&lt;br /&gt;
The course is organised by [http://cs.ttu.ee the Department of Computer Science]. The course is supported by [http://studyitin.ee/ IT Academy].&lt;br /&gt;
&lt;br /&gt;
Students should also subscribe to the [http://lists.ttu.ee/mailman/listinfo/machine-learning machine learning mailing list]. &lt;br /&gt;
The list is used to share information about the course this semester, as well as about other machine learning related events at TUT (also in the future).&lt;br /&gt;
&lt;br /&gt;
Homework rankings based on results (just for fun): [[Meedia:Ranking.pdf|Ranking]]&lt;br /&gt;
&lt;br /&gt;
No lecture on 18.04.2014. Instead, we will have a joint session for solving homework problems on Thursday 17.04, starting at 14:00 in ICT-411.&lt;br /&gt;
&lt;br /&gt;
== Assignments ==&lt;br /&gt;
[[Media:Hw1.pdf|First homework]] about decision trees is open in moodle. To submit, you have to register [https://moodle.e-ope.ee/course/view.php?id=6504 for the course].&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw2.pdf|Second homework]] about KNN and K-means is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw3.pdf|Third homework]] about neural networks is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[https://www.dropbox.com/sh/50sioj7j8z7rwfn/s_iLJ6VlA0 Data] for the third homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw4.pdf|Fourth homework]] about linear and logistic regression is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Sbp.txt|Data]] for the fourth homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw5.pdf|Fifth homework]] about naive Bayes is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Spambase.txt|Data]] for the fifth homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw6.pdf|Sixth homework]] about support vector machines is open in moodle.&lt;br /&gt;
&lt;br /&gt;
== Lecture 1: Introduction, decision trees ==&lt;br /&gt;
[[Media:lecture1.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Media:Dt_example.pdf|Example made in class]] - When to play tennis?&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch01.pdf Reading] - also contains the full algorithm for decision tree learning with the divide-and-conquer strategy.&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 2: K nearest neighbours ==&lt;br /&gt;
[[Meedia:Lecture2.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 3: K-means clustering, MLE principle ==&lt;br /&gt;
[[Meedia:Lecture3.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading I]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch13.pdf Reading II]&lt;br /&gt;
&lt;br /&gt;
== Lecture 4: Gaussian Mixture Model, EM algorithm ==&lt;br /&gt;
[[Meedia:Lecture4.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch14.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 5: History of neural networks, perceptron ==&lt;br /&gt;
[[Meedia:Lecture5.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch03.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 6: Artificial neural networks ==&lt;br /&gt;
[[Meedia:Lecture6.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Bp_math.pdf|Backpropagation notes]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch08.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 7: Linear regression ==&lt;br /&gt;
[[Meedia:Lecture7.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 8: Logistic regression ==&lt;br /&gt;
[[Meedia:Lecture8.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 9: Naive Bayes, maximum entropy model ==&lt;br /&gt;
[[Meedia:Lecture9.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes2.pdf Reading about Naive Bayes, section 2, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.jhu.edu/~jason/tutorials/loglin/#1 Tutorial about log-linear modeling by Jason Eisner]&lt;br /&gt;
&lt;br /&gt;
== Lecture 10: Sequence modeling ==&lt;br /&gt;
[[Meedia:Lecture10.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.ubc.ca/~murphyk/Bayes/rabiner.pdf Reading] The classic paper on HMMs&lt;br /&gt;
&lt;br /&gt;
== Lecture 11: Dimensionality reduction - PCA ==&lt;br /&gt;
&lt;br /&gt;
== Lecture 12: Support vector machines ==&lt;br /&gt;
[[Meedia:Lecture12.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 1-4, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Lecture 13: SVM and kernels ==&lt;br /&gt;
[[Meedia:Lecture13.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 5-8, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Lecture 14: Kernelized methods, Gaussian processes ==&lt;br /&gt;
[[Meedia:Lecture14.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 15: Process mining. The alpha algorithm ==&lt;br /&gt;
[http://courses.cs.ttu.ee/w/images/e/e3/Masinõpe15.pdf Slides]&lt;br /&gt;
&lt;br /&gt;
[http://www.processmining.org/_media/processminingbook/process_mining_chapter_05_process_discovery.pdf The alpha algorithm slides from Processmining.org]&lt;br /&gt;
&lt;br /&gt;
== Additional links ==&lt;br /&gt;
[[Meedia:Latex_example.pdf|Latex example]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Latex_example.tex|Latex example code]]&lt;br /&gt;
&lt;br /&gt;
[http://www.maths.tcd.ie/~dwilkins/LaTeXPrimer/ Latex tutorial]&lt;br /&gt;
&lt;br /&gt;
[http://arkitus.com/patterns-for-research-in-machine-learning/ Tips for scientific programming]&lt;br /&gt;
&lt;br /&gt;
== Exam ==&lt;br /&gt;
[[Meedia:SampleExam2.pdf|Example exam questions]]&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=289</id>
		<title>Machine learning</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=289"/>
		<updated>2014-06-02T18:06:29Z</updated>

		<summary type="html">&lt;p&gt;Kairit: /* Lecture 15: Process mining. The alpha algorithm */&lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;br /&gt;
Spring 2013/2014&lt;br /&gt;
&lt;br /&gt;
ITI8565: Machine learning&lt;br /&gt;
&lt;br /&gt;
Taught by: Kairit Sirts&lt;br /&gt;
&lt;br /&gt;
EAP: 6.0&lt;br /&gt;
&lt;br /&gt;
Time and place: Fridays&lt;br /&gt;
  Lectures: 16:00-17:30  X-406&lt;br /&gt;
  Labs: 17:45-19:15  X-412&lt;br /&gt;
&lt;br /&gt;
  Consultation:&lt;br /&gt;
  30.05.2014 at 15:00 in ICT-411&lt;br /&gt;
&lt;br /&gt;
  Exams: &lt;br /&gt;
  06.06.2014 at 16:00 in ICT-411&lt;br /&gt;
  13.06.2014 at 16:00 in ICT-411&lt;br /&gt;
&lt;br /&gt;
  Additional exam: &lt;br /&gt;
  19.06.2014 at 18:00 in ICT-411&lt;br /&gt;
&lt;br /&gt;
Additional information: sirts@ioc.ee, juhan.ernits@ttu.ee&lt;br /&gt;
&lt;br /&gt;
Skype: kairit.sirts&lt;br /&gt;
&lt;br /&gt;
The course is organised by [http://cs.ttu.ee the Department of Computer Science]. The course is supported by [http://studyitin.ee/ IT Academy].&lt;br /&gt;
&lt;br /&gt;
Students should also subscribe to the [http://lists.ttu.ee/mailman/listinfo/machine-learning machine learning mailing list]. &lt;br /&gt;
The list is used to share information about the course this semester, as well as about other machine learning related events at TUT (also in the future).&lt;br /&gt;
&lt;br /&gt;
Homework rankings based on results (just for fun): [[Meedia:Ranking.pdf|Ranking]]&lt;br /&gt;
&lt;br /&gt;
No lecture on 18.04.2014. Instead, we will have a joint session for solving homework problems on Thursday 17.04, starting at 14:00 in ICT-411.&lt;br /&gt;
&lt;br /&gt;
== Assignments ==&lt;br /&gt;
[[Media:Hw1.pdf|First homework]] about decision trees is open in moodle. To submit, you have to register [https://moodle.e-ope.ee/course/view.php?id=6504 for the course].&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw2.pdf|Second homework]] about KNN and K-means is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw3.pdf|Third homework]] about neural networks is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[https://www.dropbox.com/sh/50sioj7j8z7rwfn/s_iLJ6VlA0 Data] for the third homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw4.pdf|Fourth homework]] about linear and logistic regression is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Sbp.txt|Data]] for the fourth homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw5.pdf|Fifth homework]] about naive Bayes is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Spambase.txt|Data]] for the fifth homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw6.pdf|Sixth homework]] about support vector machines is open in moodle.&lt;br /&gt;
&lt;br /&gt;
== Lecture 1: Introduction, decision trees ==&lt;br /&gt;
[[Media:lecture1.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Media:Dt_example.pdf|Example made in class]] - When to play tennis?&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch01.pdf Reading] - also contains the full algorithm for decision tree learning with the divide-and-conquer strategy.&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 2: K nearest neighbours ==&lt;br /&gt;
[[Meedia:Lecture2.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 3: K-means clustering, MLE principle ==&lt;br /&gt;
[[Meedia:Lecture3.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading I]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch13.pdf Reading II]&lt;br /&gt;
&lt;br /&gt;
== Lecture 4: Gaussian Mixture Model, EM algorithm ==&lt;br /&gt;
[[Meedia:Lecture4.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch14.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 5: History of neural networks, perceptron ==&lt;br /&gt;
[[Meedia:Lecture5.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch03.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 6: Artificial neural networks ==&lt;br /&gt;
[[Meedia:Lecture6.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Bp_math.pdf|Backpropagation notes]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch08.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 7: Linear regression ==&lt;br /&gt;
[[Meedia:Lecture7.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 8: Logistic regression ==&lt;br /&gt;
[[Meedia:Lecture8.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 9: Naive Bayes, maximum entropy model ==&lt;br /&gt;
[[Meedia:Lecture9.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes2.pdf Reading about Naive Bayes, section 2, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.jhu.edu/~jason/tutorials/loglin/#1 Tutorial about log-linear modeling by Jason Eisner]&lt;br /&gt;
&lt;br /&gt;
== Lecture 10: Sequence modeling ==&lt;br /&gt;
[[Meedia:Lecture10.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.ubc.ca/~murphyk/Bayes/rabiner.pdf Reading] The classic paper on HMMs&lt;br /&gt;
&lt;br /&gt;
== Lecture 11: Dimensionality reduction - PCA ==&lt;br /&gt;
&lt;br /&gt;
== Lecture 12: Support vector machines ==&lt;br /&gt;
[[Meedia:Lecture12.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 1-4, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Lecture 13: SVM and kernels ==&lt;br /&gt;
[[Meedia:Lecture13.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 5-8, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Lecture 14: Kernelized methods, Gaussian processes ==&lt;br /&gt;
[[Meedia:Lecture14.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 15: Process mining. The alpha algorithm ==&lt;br /&gt;
[http://courses.cs.ttu.ee/w/images/e/e3/Masinõpe15.pdf Slides]&lt;br /&gt;
&lt;br /&gt;
[http://www.processmining.org/_media/processminingbook/process_mining_chapter_05_process_discovery.pdf The alpha algorithm slides from Processmining.org]&lt;br /&gt;
&lt;br /&gt;
== Additional links ==&lt;br /&gt;
[[Meedia:Latex_example.pdf|Latex example]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Latex_example.tex|Latex example code]]&lt;br /&gt;
&lt;br /&gt;
[http://www.maths.tcd.ie/~dwilkins/LaTeXPrimer/ Latex tutorial]&lt;br /&gt;
&lt;br /&gt;
[http://arkitus.com/patterns-for-research-in-machine-learning/ Tips for scientific programming]&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=283</id>
		<title>Machine learning</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=283"/>
		<updated>2014-05-28T17:09:14Z</updated>

		<summary type="html">&lt;p&gt;Kairit: &lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;br /&gt;
Spring 2013/2014&lt;br /&gt;
&lt;br /&gt;
ITI8565: Machine learning&lt;br /&gt;
&lt;br /&gt;
Taught by: Kairit Sirts&lt;br /&gt;
&lt;br /&gt;
EAP: 6.0&lt;br /&gt;
&lt;br /&gt;
Time and place: Fridays&lt;br /&gt;
  Lectures: 16:00-17:30  X-406&lt;br /&gt;
  Labs: 17:45-19:15  X-412&lt;br /&gt;
&lt;br /&gt;
  Consultation:&lt;br /&gt;
  30.05.2014 at 15:00 in ICT-411&lt;br /&gt;
&lt;br /&gt;
  Exams: &lt;br /&gt;
  06.06.2014 at 16:00 in ICT-411&lt;br /&gt;
  13.06.2014 at 16:00 in ICT-411&lt;br /&gt;
&lt;br /&gt;
  Additional exam: &lt;br /&gt;
  19.06.2014 at 18:00 in ICT-411&lt;br /&gt;
&lt;br /&gt;
Additional information: sirts@ioc.ee, juhan.ernits@ttu.ee&lt;br /&gt;
&lt;br /&gt;
Skype: kairit.sirts&lt;br /&gt;
&lt;br /&gt;
The course is organised by [http://cs.ttu.ee the Department of Computer Science]. The course is supported by [http://studyitin.ee/ IT Academy].&lt;br /&gt;
&lt;br /&gt;
Students should also subscribe to the [http://lists.ttu.ee/mailman/listinfo/machine-learning machine learning mailing list]. &lt;br /&gt;
The list is used to share information about the course this semester, as well as about other machine learning related events at TUT (also in the future).&lt;br /&gt;
&lt;br /&gt;
Homework rankings based on results (just for fun): [[Meedia:Ranking.pdf|Ranking]]&lt;br /&gt;
&lt;br /&gt;
No lecture on 18.04.2014. Instead, we will have a joint session for solving homework problems on Thursday 17.04, starting at 14:00 in ICT-411.&lt;br /&gt;
&lt;br /&gt;
== Assignments ==&lt;br /&gt;
[[Media:Hw1.pdf|First homework]] about decision trees is open in moodle. To submit, you have to register [https://moodle.e-ope.ee/course/view.php?id=6504 for the course].&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw2.pdf|Second homework]] about KNN and K-means is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw3.pdf|Third homework]] about neural networks is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[https://www.dropbox.com/sh/50sioj7j8z7rwfn/s_iLJ6VlA0 Data] for the third homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw4.pdf|Fourth homework]] about linear and logistic regression is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Sbp.txt|Data]] for the fourth homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw5.pdf|Fifth homework]] about naive Bayes is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Spambase.txt|Data]] for the fifth homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw6.pdf|Sixth homework]] about support vector machines is open in moodle.&lt;br /&gt;
&lt;br /&gt;
== Lecture 1: Introduction, decision trees ==&lt;br /&gt;
[[Media:lecture1.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Media:Dt_example.pdf|Example made in class]] - When to play tennis?&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch01.pdf Reading] - also contains the full algorithm for decision tree learning with the divide-and-conquer strategy.&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 2: K nearest neighbours ==&lt;br /&gt;
[[Meedia:Lecture2.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 3: K-means clustering, MLE principle ==&lt;br /&gt;
[[Meedia:Lecture3.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading I]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch13.pdf Reading II]&lt;br /&gt;
&lt;br /&gt;
== Lecture 4: Gaussian Mixture Model, EM algorithm ==&lt;br /&gt;
[[Meedia:Lecture4.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch14.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 5: History of neural networks, perceptron ==&lt;br /&gt;
[[Meedia:Lecture5.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch03.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 6: Artificial neural networks ==&lt;br /&gt;
[[Meedia:Lecture6.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Bp_math.pdf|Backpropagation notes]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch08.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 7: Linear regression ==&lt;br /&gt;
[[Meedia:Lecture7.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 8: Logistic regression ==&lt;br /&gt;
[[Meedia:Lecture8.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 9: Naive Bayes, maximum entropy model ==&lt;br /&gt;
[[Meedia:Lecture9.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes2.pdf Reading about Naive Bayes, section 2, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.jhu.edu/~jason/tutorials/loglin/#1 Tutorial about log-linear modeling by Jason Eisner]&lt;br /&gt;
&lt;br /&gt;
== Lecture 10: Sequence modeling ==&lt;br /&gt;
[[Meedia:Lecture10.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.ubc.ca/~murphyk/Bayes/rabiner.pdf Reading] The classic paper on HMMs&lt;br /&gt;
&lt;br /&gt;
== Lecture 11: Dimensionality reduction - PCA ==&lt;br /&gt;
&lt;br /&gt;
== Lecture 12: Support vector machines ==&lt;br /&gt;
[[Meedia:Lecture12.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 1-4, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Lecture 13: SVM and kernels ==&lt;br /&gt;
[[Meedia:Lecture13.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 5-8, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Lecture 14: Kernelized methods, Gaussian processes ==&lt;br /&gt;
[[Meedia:Lecture14.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Additional links ==&lt;br /&gt;
[[Meedia:Latex_example.pdf|Latex example]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Latex_example.tex|Latex example code]]&lt;br /&gt;
&lt;br /&gt;
[http://www.maths.tcd.ie/~dwilkins/LaTeXPrimer/ Latex tutorial]&lt;br /&gt;
&lt;br /&gt;
[http://arkitus.com/patterns-for-research-in-machine-learning/ Tips for scientific programming]&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=282</id>
		<title>Fail:Ranking.pdf</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=282"/>
		<updated>2014-05-28T17:08:49Z</updated>

		<summary type="html">&lt;p&gt;Kairit: Kairit laadis üles faili &amp;amp;quot;Pilt:Ranking.pdf&amp;amp;quot; uue versiooni&lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=281</id>
		<title>Machine learning</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=281"/>
		<updated>2014-05-28T14:50:29Z</updated>

		<summary type="html">&lt;p&gt;Kairit: &lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;br /&gt;
Spring 2013/2014&lt;br /&gt;
&lt;br /&gt;
ITI8565: Machine learning&lt;br /&gt;
&lt;br /&gt;
Taught by: Kairit Sirts&lt;br /&gt;
&lt;br /&gt;
EAP: 6.0&lt;br /&gt;
&lt;br /&gt;
Time and place: Fridays&lt;br /&gt;
  Lectures: 16:00-17:30  X-406&lt;br /&gt;
  Labs: 17:45-19:15  X-412&lt;br /&gt;
&lt;br /&gt;
  Consultation:&lt;br /&gt;
  30.05.2014 at 15:00 in ICT-411&lt;br /&gt;
&lt;br /&gt;
  Exams: &lt;br /&gt;
  06.06.2014 at 16:00 in ICT-411&lt;br /&gt;
  13.06.2014 at 16:00 in ICT-411&lt;br /&gt;
&lt;br /&gt;
  Additional exam: &lt;br /&gt;
  19.06.2014 at 18:00 in ICT-411&lt;br /&gt;
&lt;br /&gt;
Additional information: sirts@ioc.ee, juhan.ernits@ttu.ee&lt;br /&gt;
&lt;br /&gt;
Skype: kairit.sirts&lt;br /&gt;
&lt;br /&gt;
The course is organised by [http://cs.ttu.ee the Department of Computer Science]. The course is supported by [http://studyitin.ee/ IT Academy].&lt;br /&gt;
&lt;br /&gt;
Students should also subscribe to the [http://lists.ttu.ee/mailman/listinfo/machine-learning machine learning mailing list]. &lt;br /&gt;
The list is used to share information about the course this semester, as well as about other machine learning related events at TUT (also in the future).&lt;br /&gt;
&lt;br /&gt;
Homework rankings based on results (just for fun): [[Meedia:Ranking.pdf|Ranking]]&lt;br /&gt;
&lt;br /&gt;
&amp;#039;&amp;#039;&amp;#039;NB!&amp;#039;&amp;#039;&amp;#039; No lecture on 18.04.2014. Instead, we will have a joint session for solving homework problems on Thursday 17.04, starting at 14:00 in ICT-411.&lt;br /&gt;
&lt;br /&gt;
== Assignments ==&lt;br /&gt;
[[Media:Hw1.pdf|First homework]] about decision trees is open in moodle. To submit, you have to register [https://moodle.e-ope.ee/course/view.php?id=6504 for the course].&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw2.pdf|Second homework]] about KNN and K-means is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw3.pdf|Third homework]] about neural networks is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[https://www.dropbox.com/sh/50sioj7j8z7rwfn/s_iLJ6VlA0 Data] for the third homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw4.pdf|Fourth homework]] about linear and logistic regression is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Sbp.txt|Data]] for the fourth homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw5.pdf|Fifth homework]] about naive Bayes is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Spambase.txt|Data]] for the fifth homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw6.pdf|Sixth homework]] about support vector machines is open in moodle.&lt;br /&gt;
&lt;br /&gt;
== Lecture 1: Introduction, decision trees ==&lt;br /&gt;
[[Media:lecture1.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Media:Dt_example.pdf|Example made in class]] - When to play tennis?&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch01.pdf Reading] - also contains the full algorithm for decision tree learning with the divide-and-conquer strategy.&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 2: K nearest neighbours ==&lt;br /&gt;
[[Meedia:Lecture2.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 3: K-means clustering, MLE principle ==&lt;br /&gt;
[[Meedia:Lecture3.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading I]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch13.pdf Reading II]&lt;br /&gt;
&lt;br /&gt;
== Lecture 4: Gaussian Mixture Model, EM algorithm ==&lt;br /&gt;
[[Meedia:Lecture4.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch14.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 5: History of neural networks, perceptron ==&lt;br /&gt;
[[Meedia:Lecture5.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch03.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 6: Artificial neural networks ==&lt;br /&gt;
[[Meedia:Lecture6.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Bp_math.pdf|Backpropagation notes]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch08.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 7: Linear regression ==&lt;br /&gt;
[[Meedia:Lecture7.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 8: Logistic regression ==&lt;br /&gt;
[[Meedia:Lecture8.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 9: Naive Bayes, maximum entropy model ==&lt;br /&gt;
[[Meedia:Lecture9.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes2.pdf Reading about Naive Bayes, section 2, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.jhu.edu/~jason/tutorials/loglin/#1 Tutorial about log-linear modeling by Jason Eisner]&lt;br /&gt;
&lt;br /&gt;
== Lecture 10: Sequence modeling ==&lt;br /&gt;
[[Meedia:Lecture10.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.ubc.ca/~murphyk/Bayes/rabiner.pdf Reading] The classic paper on HMMs&lt;br /&gt;
&lt;br /&gt;
== Lecture 11: Dimensionality reduction - PCA ==&lt;br /&gt;
&lt;br /&gt;
== Lecture 12: Support vector machines ==&lt;br /&gt;
[[Meedia:Lecture12.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 1-4, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Lecture 13: SVM and kernels ==&lt;br /&gt;
[[Meedia:Lecture13.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 5-8, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Lecture 14: Kernelized methods, Gaussian processes ==&lt;br /&gt;
[[Meedia:Lecture14.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Additional links ==&lt;br /&gt;
[[Meedia:Latex_example.pdf|Latex example]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Latex_example.tex|Latex example code]]&lt;br /&gt;
&lt;br /&gt;
[http://www.maths.tcd.ie/~dwilkins/LaTeXPrimer/ Latex tutorial]&lt;br /&gt;
&lt;br /&gt;
[http://arkitus.com/patterns-for-research-in-machine-learning/ Tips for scientific programming]&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:Lecture14.pdf&amp;diff=280</id>
		<title>Fail:Lecture14.pdf</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:Lecture14.pdf&amp;diff=280"/>
		<updated>2014-05-27T14:56:28Z</updated>

		<summary type="html">&lt;p&gt;Kairit: &lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=279</id>
		<title>Machine learning</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=279"/>
		<updated>2014-05-27T14:55:58Z</updated>

		<summary type="html">&lt;p&gt;Kairit: &lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;br /&gt;
Spring 2013/2014&lt;br /&gt;
&lt;br /&gt;
ITI8565: Machine learning&lt;br /&gt;
&lt;br /&gt;
Taught by: Kairit Sirts&lt;br /&gt;
&lt;br /&gt;
EAP: 6.0&lt;br /&gt;
&lt;br /&gt;
Time and place: Fridays&lt;br /&gt;
  Lectures: 16:00-17:30  X-406&lt;br /&gt;
  Labs: 17:45-19:15  X-412&lt;br /&gt;
&lt;br /&gt;
  Exams: &lt;br /&gt;
  06.06.2014 at 16:00 in ICT-411&lt;br /&gt;
  13.06.2014 at 16:00 in ICT-411&lt;br /&gt;
&lt;br /&gt;
  Additional exam: &lt;br /&gt;
  19.06.2014 at 18:00 in ICT-411&lt;br /&gt;
&lt;br /&gt;
Additional information: sirts@ioc.ee, juhan.ernits@ttu.ee&lt;br /&gt;
&lt;br /&gt;
Skype: kairit.sirts&lt;br /&gt;
&lt;br /&gt;
The course is organised by [http://cs.ttu.ee the Department of Computer Science]. The course is supported by [http://studyitin.ee/ IT Academy].&lt;br /&gt;
&lt;br /&gt;
Students should also subscribe to the [http://lists.ttu.ee/mailman/listinfo/machine-learning machine learning mailing list]. &lt;br /&gt;
The list is used to share information about the course this semester, as well as about other machine learning related events at TUT (also in the future).&lt;br /&gt;
&lt;br /&gt;
Homework rankings based on results (just for fun): [[Meedia:Ranking.pdf|Ranking]]&lt;br /&gt;
&lt;br /&gt;
&amp;#039;&amp;#039;&amp;#039;NB!&amp;#039;&amp;#039;&amp;#039; No lecture on 18.04.2014. Instead, we will have a joint session for solving homework problems on Thursday 17.04, starting at 14:00 in ICT-411.&lt;br /&gt;
&lt;br /&gt;
== Assignments ==&lt;br /&gt;
[[Media:Hw1.pdf|First homework]] about decision trees is open in moodle. To submit, you have to register [https://moodle.e-ope.ee/course/view.php?id=6504 for the course].&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw2.pdf|Second homework]] about KNN and K-means is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw3.pdf|Third homework]] about neural networks is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[https://www.dropbox.com/sh/50sioj7j8z7rwfn/s_iLJ6VlA0 Data] for the third homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw4.pdf|Fourth homework]] about linear and logistic regression is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Sbp.txt|Data]] for the fourth homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw5.pdf|Fifth homework]] about naive Bayes is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Spambase.txt|Data]] for the fifth homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw6.pdf|Sixth homework]] about support vector machines is open in moodle.&lt;br /&gt;
&lt;br /&gt;
== Lecture 1: Introduction, decision trees ==&lt;br /&gt;
[[Media:lecture1.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Media:Dt_example.pdf|Example made in class]] - When to play tennis?&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch01.pdf Reading] - also contains the full algorithm for decision tree learning with the divide-and-conquer strategy.&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 2: K nearest neighbours ==&lt;br /&gt;
[[Meedia:Lecture2.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 3: K-means clustering, MLE principle ==&lt;br /&gt;
[[Meedia:Lecture3.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading I]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch13.pdf Reading II]&lt;br /&gt;
&lt;br /&gt;
== Lecture 4: Gaussian Mixture Model, EM algorithm ==&lt;br /&gt;
[[Meedia:Lecture4.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch14.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 5: History of neural networks, perceptron ==&lt;br /&gt;
[[Meedia:Lecture5.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch03.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 6: Artificial neural networks ==&lt;br /&gt;
[[Meedia:Lecture6.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Bp_math.pdf|Backpropagation notes]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch08.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 7: Linear regression ==&lt;br /&gt;
[[Meedia:Lecture7.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 8: Logistic regression ==&lt;br /&gt;
[[Meedia:Lecture8.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 9: Naive Bayes, maximum entropy model ==&lt;br /&gt;
[[Meedia:Lecture9.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes2.pdf Reading about Naive Bayes, section 2, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.jhu.edu/~jason/tutorials/loglin/#1 Tutorial about log-linear modeling by Jason Eisner]&lt;br /&gt;
&lt;br /&gt;
== Lecture 10: Sequence modeling ==&lt;br /&gt;
[[Meedia:Lecture10.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.ubc.ca/~murphyk/Bayes/rabiner.pdf Reading] The classic paper on HMMs&lt;br /&gt;
&lt;br /&gt;
== Lecture 11: Dimensionality reduction - PCA ==&lt;br /&gt;
&lt;br /&gt;
== Lecture 12: Support vector machines ==&lt;br /&gt;
[[Meedia:Lecture12.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 1-4, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Lecture 13: SVM and kernels ==&lt;br /&gt;
[[Meedia:Lecture13.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 5-8, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Lecture 14: Kernelized methods, Gaussian processes ==&lt;br /&gt;
[[Meedia:Lecture14.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Additional links ==&lt;br /&gt;
[[Meedia:Latex_example.pdf|Latex example]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Latex_example.tex|Latex example code]]&lt;br /&gt;
&lt;br /&gt;
[http://www.maths.tcd.ie/~dwilkins/LaTeXPrimer/ Latex tutorial]&lt;br /&gt;
&lt;br /&gt;
[http://arkitus.com/patterns-for-research-in-machine-learning/ Tips for scientific programming]&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=278</id>
		<title>Fail:Ranking.pdf</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=278"/>
		<updated>2014-05-26T14:18:49Z</updated>

		<summary type="html">&lt;p&gt;Kairit: Kairit laadis üles faili &amp;amp;quot;Pilt:Ranking.pdf&amp;amp;quot; uue versiooni&lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=277</id>
		<title>Fail:Ranking.pdf</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=277"/>
		<updated>2014-05-20T14:16:33Z</updated>

		<summary type="html">&lt;p&gt;Kairit: Kairit laadis üles faili &amp;amp;quot;Pilt:Ranking.pdf&amp;amp;quot; uue versiooni&lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=276</id>
		<title>Fail:Ranking.pdf</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=276"/>
		<updated>2014-05-19T19:00:38Z</updated>

		<summary type="html">&lt;p&gt;Kairit: Kairit laadis üles faili &amp;amp;quot;Pilt:Ranking.pdf&amp;amp;quot; uue versiooni&lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=275</id>
		<title>Machine learning</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=275"/>
		<updated>2014-05-19T16:58:58Z</updated>

		<summary type="html">&lt;p&gt;Kairit: &lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;br /&gt;
Spring 2013/2014&lt;br /&gt;
&lt;br /&gt;
ITI8565: Machine learning&lt;br /&gt;
&lt;br /&gt;
Taught by: Kairit Sirts&lt;br /&gt;
&lt;br /&gt;
EAP: 6.0&lt;br /&gt;
&lt;br /&gt;
Time and place: Fridays&lt;br /&gt;
  Lectures: 16:00-17:30  X-406&lt;br /&gt;
  Labs: 17:45-19:15  X-412&lt;br /&gt;
&lt;br /&gt;
  Exams: &lt;br /&gt;
  06.06.2014 at 16:00 in ICT-411&lt;br /&gt;
  13.06.2014 at 16:00 in ICT-411&lt;br /&gt;
&lt;br /&gt;
  Additional exam: &lt;br /&gt;
  19.06.2014 at 18:00 in ICT-411&lt;br /&gt;
&lt;br /&gt;
Additional information: sirts@ioc.ee, juhan.ernits@ttu.ee&lt;br /&gt;
&lt;br /&gt;
Skype: kairit.sirts&lt;br /&gt;
&lt;br /&gt;
The course is organised by [http://cs.ttu.ee the Department of Computer Science]. The course is supported by [http://studyitin.ee/ IT Academy].&lt;br /&gt;
&lt;br /&gt;
Students should also subscribe to the [http://lists.ttu.ee/mailman/listinfo/machine-learning machine learning mailing list].&lt;br /&gt;
This list is used to share information about the course this semester, as well as any other machine learning related events happening at TUT (now and in the future).&lt;br /&gt;
&lt;br /&gt;
Homework rankings based on results (just for fun): [[Meedia:Ranking.pdf|Ranking]]&lt;br /&gt;
&lt;br /&gt;
&amp;#039;&amp;#039;&amp;#039;NB!&amp;#039;&amp;#039;&amp;#039; No lecture on 18.04.2014. Instead of that, we will have a joint session for solving homework problems on Thursday 17.04 starting from 14:00 in ICT-411.&lt;br /&gt;
&lt;br /&gt;
== Assignments ==&lt;br /&gt;
[[Media:Hw1.pdf|First homework]] about decision trees is open in moodle. To submit it, you have to register [https://moodle.e-ope.ee/course/view.php?id=6504 for the course].&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw2.pdf|Second homework]] about KNN and K-means is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw3.pdf|Third homework]] about neural networks is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[https://www.dropbox.com/sh/50sioj7j8z7rwfn/s_iLJ6VlA0 Data] for the third homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw4.pdf|Fourth homework]] about linear and logistic regression is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Sbp.txt|Data]] for the fourth homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw5.pdf|Fifth homework]] about naive Bayes is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Spambase.txt|Data]] for the fifth homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw6.pdf|Sixth homework]] about support vector machines is open in moodle.&lt;br /&gt;
&lt;br /&gt;
== Lecture 1: Introduction, decision trees ==&lt;br /&gt;
[[Media:lecture1.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Media:Dt_example.pdf|Example made in class]] - When to play tennis?&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch01.pdf Reading] - also contains the full algorithm for decision tree learning with a divide-and-conquer strategy.&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 2: K nearest neighbours ==&lt;br /&gt;
[[Meedia:Lecture2.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 3: K-means clustering, MLE principle ==&lt;br /&gt;
[[Meedia:Lecture3.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading I]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch13.pdf Reading II]&lt;br /&gt;
&lt;br /&gt;
== Lecture 4: Gaussian Mixture Model, EM algorithm ==&lt;br /&gt;
[[Meedia:Lecture4.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch14.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 5: History of neural networks, perceptron ==&lt;br /&gt;
[[Meedia:Lecture5.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch03.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 6: Artificial neural networks ==&lt;br /&gt;
[[Meedia:Lecture6.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Bp_math.pdf|Backpropagation notes]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch08.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 7: Linear regression ==&lt;br /&gt;
[[Meedia:Lecture7.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 8: Logistic regression ==&lt;br /&gt;
[[Meedia:Lecture8.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 9: Naive Bayes, maximum entropy model ==&lt;br /&gt;
[[Meedia:Lecture9.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes2.pdf Reading about Naive Bayes, section 2, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.jhu.edu/~jason/tutorials/loglin/#1 Tutorial about log-linear modeling by Jason Eisner]&lt;br /&gt;
&lt;br /&gt;
== Lecture 10: Sequence modeling ==&lt;br /&gt;
[[Meedia:Lecture10.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.ubc.ca/~murphyk/Bayes/rabiner.pdf Reading] - the classic paper on HMMs&lt;br /&gt;
&lt;br /&gt;
== Lecture 11: Dimensionality reduction - PCA ==&lt;br /&gt;
&lt;br /&gt;
== Lecture 12: Support vector machines ==&lt;br /&gt;
[[Meedia:Lecture12.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 1-4, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Lecture 13: SVM and kernels ==&lt;br /&gt;
[[Meedia:Lecture13.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 5-8, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Additional links ==&lt;br /&gt;
[[Meedia:Latex_example.pdf|LaTeX example]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Latex_example.tex|LaTeX example code]]&lt;br /&gt;
&lt;br /&gt;
[http://www.maths.tcd.ie/~dwilkins/LaTeXPrimer/ LaTeX tutorial]&lt;br /&gt;
&lt;br /&gt;
[http://arkitus.com/patterns-for-research-in-machine-learning/ Tips for scientific programming]&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=274</id>
		<title>Machine learning</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=274"/>
		<updated>2014-05-19T16:58:18Z</updated>

		<summary type="html">&lt;p&gt;Kairit: &lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;br /&gt;
Spring 2013/2014&lt;br /&gt;
&lt;br /&gt;
ITI8565: Machine learning&lt;br /&gt;
&lt;br /&gt;
Taught by: Kairit Sirts&lt;br /&gt;
&lt;br /&gt;
EAP: 6.0&lt;br /&gt;
&lt;br /&gt;
Time and place: Fridays&lt;br /&gt;
  Lectures: 16:00-17:30  X-406&lt;br /&gt;
  Labs: 17:45-19:15  X-412&lt;br /&gt;
&lt;br /&gt;
  Exams: 06.06.2014 at 16:00 in ICT-411&lt;br /&gt;
         13.06.2014 at 16:00 in ICT-411&lt;br /&gt;
&lt;br /&gt;
  Additional exam: 19.06.2014 at 18:00 in ICT-411&lt;br /&gt;
&lt;br /&gt;
Additional information: sirts@ioc.ee, juhan.ernits@ttu.ee&lt;br /&gt;
&lt;br /&gt;
Skype: kairit.sirts&lt;br /&gt;
&lt;br /&gt;
The course is organised by [http://cs.ttu.ee the Department of Computer Science]. The course is supported by [http://studyitin.ee/ IT Academy].&lt;br /&gt;
&lt;br /&gt;
Students should also subscribe to the [http://lists.ttu.ee/mailman/listinfo/machine-learning machine learning mailing list].&lt;br /&gt;
This list is used to share information about the course this semester, as well as any other machine learning related events happening at TUT (now and in the future).&lt;br /&gt;
&lt;br /&gt;
Homework rankings based on results (just for fun): [[Meedia:Ranking.pdf|Ranking]]&lt;br /&gt;
&lt;br /&gt;
&amp;#039;&amp;#039;&amp;#039;NB!&amp;#039;&amp;#039;&amp;#039; No lecture on 18.04.2014. Instead of that, we will have a joint session for solving homework problems on Thursday 17.04 starting from 14:00 in ICT-411.&lt;br /&gt;
&lt;br /&gt;
== Assignments ==&lt;br /&gt;
[[Media:Hw1.pdf|First homework]] about decision trees is open in moodle. To submit it, you have to register [https://moodle.e-ope.ee/course/view.php?id=6504 for the course].&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw2.pdf|Second homework]] about KNN and K-means is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw3.pdf|Third homework]] about neural networks is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[https://www.dropbox.com/sh/50sioj7j8z7rwfn/s_iLJ6VlA0 Data] for the third homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw4.pdf|Fourth homework]] about linear and logistic regression is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Sbp.txt|Data]] for the fourth homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw5.pdf|Fifth homework]] about naive Bayes is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Spambase.txt|Data]] for the fifth homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw6.pdf|Sixth homework]] about support vector machines is open in moodle.&lt;br /&gt;
&lt;br /&gt;
== Lecture 1: Introduction, decision trees ==&lt;br /&gt;
[[Media:lecture1.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Media:Dt_example.pdf|Example made in class]] - When to play tennis?&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch01.pdf Reading] - also contains the full algorithm for decision tree learning with a divide-and-conquer strategy.&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 2: K nearest neighbours ==&lt;br /&gt;
[[Meedia:Lecture2.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 3: K-means clustering, MLE principle ==&lt;br /&gt;
[[Meedia:Lecture3.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading I]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch13.pdf Reading II]&lt;br /&gt;
&lt;br /&gt;
== Lecture 4: Gaussian Mixture Model, EM algorithm ==&lt;br /&gt;
[[Meedia:Lecture4.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch14.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 5: History of neural networks, perceptron ==&lt;br /&gt;
[[Meedia:Lecture5.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch03.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 6: Artificial neural networks ==&lt;br /&gt;
[[Meedia:Lecture6.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Bp_math.pdf|Backpropagation notes]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch08.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 7: Linear regression ==&lt;br /&gt;
[[Meedia:Lecture7.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 8: Logistic regression ==&lt;br /&gt;
[[Meedia:Lecture8.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 9: Naive Bayes, maximum entropy model ==&lt;br /&gt;
[[Meedia:Lecture9.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes2.pdf Reading about Naive Bayes, section 2, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.jhu.edu/~jason/tutorials/loglin/#1 Tutorial about log-linear modeling by Jason Eisner]&lt;br /&gt;
&lt;br /&gt;
== Lecture 10: Sequence modeling ==&lt;br /&gt;
[[Meedia:Lecture10.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.ubc.ca/~murphyk/Bayes/rabiner.pdf Reading] - the classic paper on HMMs&lt;br /&gt;
&lt;br /&gt;
== Lecture 11: Dimensionality reduction - PCA ==&lt;br /&gt;
&lt;br /&gt;
== Lecture 12: Support vector machines ==&lt;br /&gt;
[[Meedia:Lecture12.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 1-4, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Lecture 13: SVM and kernels ==&lt;br /&gt;
[[Meedia:Lecture13.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 5-8, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Additional links ==&lt;br /&gt;
[[Meedia:Latex_example.pdf|LaTeX example]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Latex_example.tex|LaTeX example code]]&lt;br /&gt;
&lt;br /&gt;
[http://www.maths.tcd.ie/~dwilkins/LaTeXPrimer/ LaTeX tutorial]&lt;br /&gt;
&lt;br /&gt;
[http://arkitus.com/patterns-for-research-in-machine-learning/ Tips for scientific programming]&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:Hw6.pdf&amp;diff=273</id>
		<title>Fail:Hw6.pdf</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:Hw6.pdf&amp;diff=273"/>
		<updated>2014-05-19T16:54:51Z</updated>

		<summary type="html">&lt;p&gt;Kairit: &lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=272</id>
		<title>Machine learning</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=272"/>
		<updated>2014-05-19T16:54:34Z</updated>

		<summary type="html">&lt;p&gt;Kairit: /* Assignments */&lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;br /&gt;
Spring 2013/2014&lt;br /&gt;
&lt;br /&gt;
ITI8565: Machine learning&lt;br /&gt;
&lt;br /&gt;
Taught by: Kairit Sirts&lt;br /&gt;
&lt;br /&gt;
EAP: 6.0&lt;br /&gt;
&lt;br /&gt;
Time and place: Fridays&lt;br /&gt;
  Lectures: 16:00-17:30  X-406&lt;br /&gt;
  Labs: 17:45-19:15  X-412&lt;br /&gt;
&lt;br /&gt;
Additional information: sirts@ioc.ee, juhan.ernits@ttu.ee&lt;br /&gt;
&lt;br /&gt;
Skype: kairit.sirts&lt;br /&gt;
&lt;br /&gt;
The course is organised by [http://cs.ttu.ee the Department of Computer Science]. The course is supported by [http://studyitin.ee/ IT Academy].&lt;br /&gt;
&lt;br /&gt;
Students should also subscribe to the [http://lists.ttu.ee/mailman/listinfo/machine-learning machine learning mailing list].&lt;br /&gt;
This list is used to share information about the course this semester, as well as any other machine learning related events happening at TUT (now and in the future).&lt;br /&gt;
&lt;br /&gt;
Homework rankings based on results (just for fun): [[Meedia:Ranking.pdf|Ranking]]&lt;br /&gt;
&lt;br /&gt;
&amp;#039;&amp;#039;&amp;#039;NB!&amp;#039;&amp;#039;&amp;#039; No lecture on 18.04.2014. Instead of that, we will have a joint session for solving homework problems on Thursday 17.04 starting from 14:00 in ICT-411.&lt;br /&gt;
&lt;br /&gt;
== Assignments ==&lt;br /&gt;
[[Media:Hw1.pdf|First homework]] about decision trees is open in moodle. To submit it, you have to register [https://moodle.e-ope.ee/course/view.php?id=6504 for the course].&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw2.pdf|Second homework]] about KNN and K-means is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw3.pdf|Third homework]] about neural networks is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[https://www.dropbox.com/sh/50sioj7j8z7rwfn/s_iLJ6VlA0 Data] for the third homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw4.pdf|Fourth homework]] about linear and logistic regression is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Sbp.txt|Data]] for the fourth homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw5.pdf|Fifth homework]] about naive Bayes is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Spambase.txt|Data]] for the fifth homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw6.pdf|Sixth homework]] about support vector machines is open in moodle.&lt;br /&gt;
&lt;br /&gt;
== Lecture 1: Introduction, decision trees ==&lt;br /&gt;
[[Media:lecture1.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Media:Dt_example.pdf|Example made in class]] - When to play tennis?&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch01.pdf Reading] - also contains the full algorithm for decision tree learning with a divide-and-conquer strategy.&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 2: K nearest neighbours ==&lt;br /&gt;
[[Meedia:Lecture2.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 3: K-means clustering, MLE principle ==&lt;br /&gt;
[[Meedia:Lecture3.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading I]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch13.pdf Reading II]&lt;br /&gt;
&lt;br /&gt;
== Lecture 4: Gaussian Mixture Model, EM algorithm ==&lt;br /&gt;
[[Meedia:Lecture4.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch14.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 5: History of neural networks, perceptron ==&lt;br /&gt;
[[Meedia:Lecture5.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch03.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 6: Artificial neural networks ==&lt;br /&gt;
[[Meedia:Lecture6.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Bp_math.pdf|Backpropagation notes]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch08.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 7: Linear regression ==&lt;br /&gt;
[[Meedia:Lecture7.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 8: Logistic regression ==&lt;br /&gt;
[[Meedia:Lecture8.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 9: Naive Bayes, maximum entropy model ==&lt;br /&gt;
[[Meedia:Lecture9.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes2.pdf Reading about Naive Bayes, section 2, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.jhu.edu/~jason/tutorials/loglin/#1 Tutorial about log-linear modeling by Jason Eisner]&lt;br /&gt;
&lt;br /&gt;
== Lecture 10: Sequence modeling ==&lt;br /&gt;
[[Meedia:Lecture10.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.ubc.ca/~murphyk/Bayes/rabiner.pdf Reading] - the classic paper on HMMs&lt;br /&gt;
&lt;br /&gt;
== Lecture 11: Dimensionality reduction - PCA ==&lt;br /&gt;
&lt;br /&gt;
== Lecture 12: Support vector machines ==&lt;br /&gt;
[[Meedia:Lecture12.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 1-4, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Lecture 13: SVM and kernels ==&lt;br /&gt;
[[Meedia:Lecture13.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 5-8, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Additional links ==&lt;br /&gt;
[[Meedia:Latex_example.pdf|LaTeX example]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Latex_example.tex|LaTeX example code]]&lt;br /&gt;
&lt;br /&gt;
[http://www.maths.tcd.ie/~dwilkins/LaTeXPrimer/ LaTeX tutorial]&lt;br /&gt;
&lt;br /&gt;
[http://arkitus.com/patterns-for-research-in-machine-learning/ Tips for scientific programming]&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=271</id>
		<title>Machine learning</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=271"/>
		<updated>2014-05-19T16:54:20Z</updated>

		<summary type="html">&lt;p&gt;Kairit: /* Assignments */&lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;br /&gt;
Spring 2013/2014&lt;br /&gt;
&lt;br /&gt;
ITI8565: Machine learning&lt;br /&gt;
&lt;br /&gt;
Taught by: Kairit Sirts&lt;br /&gt;
&lt;br /&gt;
EAP: 6.0&lt;br /&gt;
&lt;br /&gt;
Time and place: Fridays&lt;br /&gt;
  Lectures: 16:00-17:30  X-406&lt;br /&gt;
  Labs: 17:45-19:15  X-412&lt;br /&gt;
&lt;br /&gt;
Additional information: sirts@ioc.ee, juhan.ernits@ttu.ee&lt;br /&gt;
&lt;br /&gt;
Skype: kairit.sirts&lt;br /&gt;
&lt;br /&gt;
The course is organised by [http://cs.ttu.ee the Department of Computer Science]. The course is supported by [http://studyitin.ee/ IT Academy].&lt;br /&gt;
&lt;br /&gt;
Students should also subscribe to the [http://lists.ttu.ee/mailman/listinfo/machine-learning machine learning mailing list].&lt;br /&gt;
This list is used to share information about the course this semester, as well as any other machine learning related events happening at TUT (now and in the future).&lt;br /&gt;
&lt;br /&gt;
Homework rankings based on results (just for fun): [[Meedia:Ranking.pdf|Ranking]]&lt;br /&gt;
&lt;br /&gt;
&amp;#039;&amp;#039;&amp;#039;NB!&amp;#039;&amp;#039;&amp;#039; No lecture on 18.04.2014. Instead of that, we will have a joint session for solving homework problems on Thursday 17.04 starting from 14:00 in ICT-411.&lt;br /&gt;
&lt;br /&gt;
== Assignments ==&lt;br /&gt;
[[Media:Hw1.pdf|First homework]] about decision trees is open in moodle. To submit it, you have to register [https://moodle.e-ope.ee/course/view.php?id=6504 for the course].&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw2.pdf|Second homework]] about KNN and K-means is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw3.pdf|Third homework]] about neural networks is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[https://www.dropbox.com/sh/50sioj7j8z7rwfn/s_iLJ6VlA0 Data] for the third homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw4.pdf|Fourth homework]] about linear and logistic regression is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Sbp.txt|Data]] for the fourth homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw5.pdf|Fifth homework]] about naive Bayes is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Spambase.txt|Data]] for the fifth homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw6.pdf|Sixth homework]] about support vector machines is open in moodle.&lt;br /&gt;
&lt;br /&gt;
== Lecture 1: Introduction, decision trees ==&lt;br /&gt;
[[Media:lecture1.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Media:Dt_example.pdf|Example made in class]] - When to play tennis?&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch01.pdf Reading] - also contains the full algorithm for decision tree learning with a divide-and-conquer strategy.&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 2: K nearest neighbours ==&lt;br /&gt;
[[Meedia:Lecture2.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 3: K-means clustering, MLE principle ==&lt;br /&gt;
[[Meedia:Lecture3.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading I]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch13.pdf Reading II]&lt;br /&gt;
&lt;br /&gt;
== Lecture 4: Gaussian Mixture Model, EM algorithm ==&lt;br /&gt;
[[Meedia:Lecture4.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch14.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 5: History of neural networks, perceptron ==&lt;br /&gt;
[[Meedia:Lecture5.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch03.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 6: Artificial neural networks ==&lt;br /&gt;
[[Meedia:Lecture6.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Bp_math.pdf|Backpropagation notes]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch08.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 7: Linear regression ==&lt;br /&gt;
[[Meedia:Lecture7.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 8: Logistic regression ==&lt;br /&gt;
[[Meedia:Lecture8.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 9: Naive Bayes, maximum entropy model ==&lt;br /&gt;
[[Meedia:Lecture9.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes2.pdf Reading about Naive Bayes, section 2, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.jhu.edu/~jason/tutorials/loglin/#1 Tutorial about log-linear modeling by Jason Eisner]&lt;br /&gt;
&lt;br /&gt;
== Lecture 10: Sequence modeling ==&lt;br /&gt;
[[Meedia:Lecture10.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.ubc.ca/~murphyk/Bayes/rabiner.pdf Reading] - the classic paper on HMMs&lt;br /&gt;
&lt;br /&gt;
== Lecture 11: Dimensionality reduction - PCA ==&lt;br /&gt;
&lt;br /&gt;
== Lecture 12: Support vector machines ==&lt;br /&gt;
[[Meedia:Lecture12.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 1-4, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Lecture 13: SVM and kernels ==&lt;br /&gt;
[[Meedia:Lecture13.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 5-8, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Additional links ==&lt;br /&gt;
[[Meedia:Latex_example.pdf|LaTeX example]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Latex_example.tex|LaTeX example code]]&lt;br /&gt;
&lt;br /&gt;
[http://www.maths.tcd.ie/~dwilkins/LaTeXPrimer/ LaTeX tutorial]&lt;br /&gt;
&lt;br /&gt;
[http://arkitus.com/patterns-for-research-in-machine-learning/ Tips for scientific programming]&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:Spambase.txt&amp;diff=270</id>
		<title>Fail:Spambase.txt</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:Spambase.txt&amp;diff=270"/>
		<updated>2014-05-12T15:15:33Z</updated>

		<summary type="html">&lt;p&gt;Kairit: &lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=269</id>
		<title>Machine learning</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=269"/>
		<updated>2014-05-12T15:15:21Z</updated>

		<summary type="html">&lt;p&gt;Kairit: /* Assignments */&lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;br /&gt;
Spring 2013/2014&lt;br /&gt;
&lt;br /&gt;
ITI8565: Machine learning&lt;br /&gt;
&lt;br /&gt;
Taught by: Kairit Sirts&lt;br /&gt;
&lt;br /&gt;
EAP: 6.0&lt;br /&gt;
&lt;br /&gt;
Time and place: Fridays&lt;br /&gt;
  Lectures: 16:00-17:30  X-406&lt;br /&gt;
  Labs: 17:45-19:15  X-412&lt;br /&gt;
&lt;br /&gt;
Additional information: sirts@ioc.ee, juhan.ernits@ttu.ee&lt;br /&gt;
&lt;br /&gt;
Skype: kairit.sirts&lt;br /&gt;
&lt;br /&gt;
The course is organised by [http://cs.ttu.ee the Department of Computer Science]. The course is supported by [http://studyitin.ee/ IT Academy].&lt;br /&gt;
&lt;br /&gt;
Students should also subscribe to the [http://lists.ttu.ee/mailman/listinfo/machine-learning machine learning mailing list].&lt;br /&gt;
This list is used to share information about the course this semester, as well as any other machine learning related events happening at TUT (now and in the future).&lt;br /&gt;
&lt;br /&gt;
Homework rankings based on results (just for fun): [[Meedia:Ranking.pdf|Ranking]]&lt;br /&gt;
&lt;br /&gt;
&amp;#039;&amp;#039;&amp;#039;NB!&amp;#039;&amp;#039;&amp;#039; No lecture on 18.04.2014. Instead of that, we will have a joint session for solving homework problems on Thursday 17.04 starting from 14:00 in ICT-411.&lt;br /&gt;
&lt;br /&gt;
== Assignments ==&lt;br /&gt;
[[Media:Hw1.pdf|First homework]] about decision trees is open in moodle. To submit it, you have to register [https://moodle.e-ope.ee/course/view.php?id=6504 for the course].&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw2.pdf|Second homework]] about KNN and K-means is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw3.pdf|Third homework]] about neural networks is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[https://www.dropbox.com/sh/50sioj7j8z7rwfn/s_iLJ6VlA0 Data] for the third homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw4.pdf|Fourth homework]] about linear and logistic regression is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Sbp.txt|Data]] for the fourth homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw5.pdf|Fifth homework]] about naive Bayes is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Spambase.txt|Data]] for the fifth homework&lt;br /&gt;
&lt;br /&gt;
== Lecture 1: Introduction, decision trees ==&lt;br /&gt;
[[Media:lecture1.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Media:Dt_example.pdf|Example made in class]] - When to play tennis?&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch01.pdf Reading] - also contains the full algorithm for decision tree learning with a divide-and-conquer strategy.&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 2: K nearest neighbours ==&lt;br /&gt;
[[Meedia:Lecture2.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 3: K-means clustering, MLE principle ==&lt;br /&gt;
[[Meedia:Lecture3.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading I]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch13.pdf Reading II]&lt;br /&gt;
&lt;br /&gt;
== Lecture 4: Gaussian Mixture Model, EM algorithm ==&lt;br /&gt;
[[Meedia:Lecture4.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch14.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 5: History of neural networks, perceptron ==&lt;br /&gt;
[[Meedia:Lecture5.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch03.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 6: Artificial neural networks ==&lt;br /&gt;
[[Meedia:Lecture6.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Bp_math.pdf|Backpropagation notes]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch08.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 7: Linear regression ==&lt;br /&gt;
[[Meedia:Lecture7.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 8: Logistic regression ==&lt;br /&gt;
[[Meedia:Lecture8.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 9: Naive Bayes, maximum entropy model ==&lt;br /&gt;
[[Meedia:Lecture9.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes2.pdf Reading about Naive Bayes, section 2, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.jhu.edu/~jason/tutorials/loglin/#1 Tutorial about log-linear modeling by Jason Eisner]&lt;br /&gt;
&lt;br /&gt;
== Lecture 10: Sequence modeling ==&lt;br /&gt;
[[Meedia:Lecture10.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.ubc.ca/~murphyk/Bayes/rabiner.pdf Reading] - the classic paper on HMMs&lt;br /&gt;
&lt;br /&gt;
== Lecture 11: Dimensionality reduction - PCA ==&lt;br /&gt;
&lt;br /&gt;
== Lecture 12: Support vector machines ==&lt;br /&gt;
[[Meedia:Lecture12.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 1-4, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Lecture 13: SVM and kernels ==&lt;br /&gt;
[[Meedia:Lecture13.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 5-8, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Additional links ==&lt;br /&gt;
[[Meedia:Latex_example.pdf|LaTeX example]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Latex_example.tex|LaTeX example code]]&lt;br /&gt;
&lt;br /&gt;
[http://www.maths.tcd.ie/~dwilkins/LaTeXPrimer/ LaTeX tutorial]&lt;br /&gt;
&lt;br /&gt;
[http://arkitus.com/patterns-for-research-in-machine-learning/ Tips for scientific programming]&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:Hw5.pdf&amp;diff=268</id>
		<title>Fail:Hw5.pdf</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:Hw5.pdf&amp;diff=268"/>
		<updated>2014-05-12T15:11:18Z</updated>

		<summary type="html">&lt;p&gt;Kairit: &lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=267</id>
		<title>Machine learning</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=267"/>
		<updated>2014-05-12T15:11:04Z</updated>

		<summary type="html">&lt;p&gt;Kairit: &lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;br /&gt;
Spring 2013/2014&lt;br /&gt;
&lt;br /&gt;
ITI8565: Machine learning&lt;br /&gt;
&lt;br /&gt;
Taught by: Kairit Sirts&lt;br /&gt;
&lt;br /&gt;
EAP: 6.0&lt;br /&gt;
&lt;br /&gt;
Time and place: Fridays&lt;br /&gt;
  Lectures: 16:00-17:30  X-406&lt;br /&gt;
  Labs: 17:45-19:15  X-412&lt;br /&gt;
&lt;br /&gt;
Additional information: sirts@ioc.ee, juhan.ernits@ttu.ee&lt;br /&gt;
&lt;br /&gt;
Skype: kairit.sirts&lt;br /&gt;
&lt;br /&gt;
The course is organised by [http://cs.ttu.ee the Department of Computer Science]. The course is supported by [http://studyitin.ee/ IT Academy].&lt;br /&gt;
&lt;br /&gt;
Students should also subscribe to the [http://lists.ttu.ee/mailman/listinfo/machine-learning machine learning mailing list].&lt;br /&gt;
This list is used to share information about the course this semester, as well as any other machine learning related events happening at TUT (now and in the future).&lt;br /&gt;
&lt;br /&gt;
Homework rankings based on results (just for fun): [[Meedia:Ranking.pdf|Ranking]]&lt;br /&gt;
&lt;br /&gt;
&amp;#039;&amp;#039;&amp;#039;NB!&amp;#039;&amp;#039;&amp;#039; No lecture on 18.04.2014. Instead of that, we will have a joint session for solving homework problems on Thursday 17.04 starting from 14:00 in ICT-411.&lt;br /&gt;
&lt;br /&gt;
== Assignments ==&lt;br /&gt;
[[Media:Hw1.pdf|First homework]] about decision trees is open in moodle. To submit it, you have to register [https://moodle.e-ope.ee/course/view.php?id=6504 for the course].&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw2.pdf|Second homework]] about KNN and K-means is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw3.pdf|Third homework]] about neural networks is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[https://www.dropbox.com/sh/50sioj7j8z7rwfn/s_iLJ6VlA0 Data] for the third homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw4.pdf|Fourth homework]] about linear and logistic regression is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Sbp.txt|Data]] for the fourth homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw5.pdf|Fifth homework]] about naive Bayes is open in moodle.&lt;br /&gt;
&lt;br /&gt;
== Lecture 1: Introduction, decision trees ==&lt;br /&gt;
[[Media:lecture1.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Media:Dt_example.pdf|Example made in class]] - When to play tennis?&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch01.pdf Reading] - also contains the full algorithm for decision tree learning with a divide-and-conquer strategy.&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 2: K nearest neighbours ==&lt;br /&gt;
[[Meedia:Lecture2.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 3: K-means clustering, MLE principle ==&lt;br /&gt;
[[Meedia:Lecture3.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading I]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch13.pdf Reading II]&lt;br /&gt;
&lt;br /&gt;
== Lecture 4: Gaussian Mixture Model, EM algorithm ==&lt;br /&gt;
[[Meedia:Lecture4.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch14.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 5: History of neural networks, perceptron ==&lt;br /&gt;
[[Meedia:Lecture5.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch03.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 6: Artificial neural networks ==&lt;br /&gt;
[[Meedia:Lecture6.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Bp_math.pdf|Backpropagation notes]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch08.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 7: Linear regression ==&lt;br /&gt;
[[Meedia:Lecture7.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 8: Logistic regression ==&lt;br /&gt;
[[Meedia:Lecture8.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 9: Naive Bayes, maximum entropy model ==&lt;br /&gt;
[[Meedia:Lecture9.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes2.pdf Reading about Naive Bayes, section 2, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.jhu.edu/~jason/tutorials/loglin/#1 Tutorial about log-linear modeling by Jason Eisner]&lt;br /&gt;
&lt;br /&gt;
== Lecture 10: Sequence modeling ==&lt;br /&gt;
[[Meedia:Lecture10.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.ubc.ca/~murphyk/Bayes/rabiner.pdf Reading] - the classic paper on HMMs&lt;br /&gt;
&lt;br /&gt;
== Lecture 11: Dimensionality reduction - PCA ==&lt;br /&gt;
&lt;br /&gt;
== Lecture 12: Support vector machines ==&lt;br /&gt;
[[Meedia:Lecture12.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 1-4, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Lecture 13: SVM and kernels ==&lt;br /&gt;
[[Meedia:Lecture13.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 5-8, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Additional links ==&lt;br /&gt;
[[Meedia:Latex_example.pdf|LaTeX example]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Latex_example.tex|LaTeX example code]]&lt;br /&gt;
&lt;br /&gt;
[http://www.maths.tcd.ie/~dwilkins/LaTeXPrimer/ LaTeX tutorial]&lt;br /&gt;
&lt;br /&gt;
[http://arkitus.com/patterns-for-research-in-machine-learning/ Tips for scientific programming]&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=266</id>
		<title>Machine learning</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=266"/>
		<updated>2014-05-10T16:53:05Z</updated>

		<summary type="html">&lt;p&gt;Kairit: &lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;br /&gt;
Spring 2013/2014&lt;br /&gt;
&lt;br /&gt;
ITI8565: Machine learning&lt;br /&gt;
&lt;br /&gt;
Taught by: Kairit Sirts&lt;br /&gt;
&lt;br /&gt;
EAP: 6.0&lt;br /&gt;
&lt;br /&gt;
Time and place: Fridays&lt;br /&gt;
  Lectures: 16:00-17:30  X-406&lt;br /&gt;
  Labs: 17:45-19:15  X-412&lt;br /&gt;
&lt;br /&gt;
Additional information: sirts@ioc.ee, juhan.ernits@ttu.ee&lt;br /&gt;
&lt;br /&gt;
Skype: kairit.sirts&lt;br /&gt;
&lt;br /&gt;
The course is organised by [http://cs.ttu.ee the Department of Computer Science]. The course is supported by [http://studyitin.ee/ IT Academy].&lt;br /&gt;
&lt;br /&gt;
Students should also subscribe to the [http://lists.ttu.ee/mailman/listinfo/machine-learning machine learning mailing list].&lt;br /&gt;
This list is used to share information about the course this semester, as well as any other machine learning related events happening at TUT (now and in the future).&lt;br /&gt;
&lt;br /&gt;
Homework rankings based on results (just for fun): [[Meedia:Ranking.pdf|Ranking]]&lt;br /&gt;
&lt;br /&gt;
&amp;#039;&amp;#039;&amp;#039;NB!&amp;#039;&amp;#039;&amp;#039; No lecture on 18.04.2014. Instead of that, we will have a joint session for solving homework problems on Thursday 17.04 starting from 14:00 in ICT-411.&lt;br /&gt;
&lt;br /&gt;
== Assignments ==&lt;br /&gt;
[[Media:Hw1.pdf|First homework]] about decision trees is open in moodle. To submit it, you have to register [https://moodle.e-ope.ee/course/view.php?id=6504 for the course].&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw2.pdf|Second homework]] about KNN and K-means is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw3.pdf|Third homework]] about neural networks is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[https://www.dropbox.com/sh/50sioj7j8z7rwfn/s_iLJ6VlA0 Data] for the third homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw4.pdf|Fourth homework]] about linear and logistic regression is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Sbp.txt|Data]] for the fourth homework&lt;br /&gt;
&lt;br /&gt;
== Lecture 1: Introduction, decision trees ==&lt;br /&gt;
[[Media:lecture1.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Media:Dt_example.pdf|Example made in class]] - When to play tennis?&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch01.pdf Reading] - also contains the full algorithm for decision tree learning with a divide-and-conquer strategy.&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 2: K nearest neighbours ==&lt;br /&gt;
[[Meedia:Lecture2.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 3: K-means clustering, MLE principle ==&lt;br /&gt;
[[Meedia:Lecture3.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading I]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch13.pdf Reading II]&lt;br /&gt;
&lt;br /&gt;
== Lecture 4: Gaussian Mixture Model, EM algorithm ==&lt;br /&gt;
[[Meedia:Lecture4.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch14.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 5: History of neural networks, perceptron ==&lt;br /&gt;
[[Meedia:Lecture5.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch03.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 6: Artificial neural networks ==&lt;br /&gt;
[[Meedia:Lecture6.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Bp_math.pdf|Backpropagation notes]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch08.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 7: Linear regression ==&lt;br /&gt;
[[Meedia:Lecture7.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 8: Logistic regression ==&lt;br /&gt;
[[Meedia:Lecture8.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 9: Naive Bayes, maximum entropy model ==&lt;br /&gt;
[[Meedia:Lecture9.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes2.pdf Reading about Naive Bayes, section 2, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.jhu.edu/~jason/tutorials/loglin/#1 Tutorial about log-linear modeling by Jason Eisner]&lt;br /&gt;
&lt;br /&gt;
== Lecture 10: Sequence modeling ==&lt;br /&gt;
[[Meedia:Lecture10.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.ubc.ca/~murphyk/Bayes/rabiner.pdf Reading] - the classic paper on HMMs&lt;br /&gt;
&lt;br /&gt;
== Lecture 11: Dimensionality reduction - PCA ==&lt;br /&gt;
&lt;br /&gt;
== Lecture 12: Support vector machines ==&lt;br /&gt;
[[Meedia:Lecture12.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 1-4, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Lecture 13: SVM and kernels ==&lt;br /&gt;
[[Meedia:Lecture13.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 5-8, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Additional links ==&lt;br /&gt;
[[Meedia:Latex_example.pdf|LaTeX example]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Latex_example.tex|LaTeX example code]]&lt;br /&gt;
&lt;br /&gt;
[http://www.maths.tcd.ie/~dwilkins/LaTeXPrimer/ LaTeX tutorial]&lt;br /&gt;
&lt;br /&gt;
[http://arkitus.com/patterns-for-research-in-machine-learning/ Tips for scientific programming]&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=265</id>
		<title>Machine learning</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=265"/>
		<updated>2014-05-10T15:07:52Z</updated>

		<summary type="html">&lt;p&gt;Kairit: &lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;br /&gt;
Spring 2013/2014&lt;br /&gt;
&lt;br /&gt;
ITI8565: Machine learning&lt;br /&gt;
&lt;br /&gt;
Taught by: Kairit Sirts&lt;br /&gt;
&lt;br /&gt;
EAP: 6.0&lt;br /&gt;
&lt;br /&gt;
Time and place: Fridays&lt;br /&gt;
  Lectures: 16:00-17:30  X-406&lt;br /&gt;
  Labs: 17:45-19:15  X-412&lt;br /&gt;
&lt;br /&gt;
Additional information: sirts@ioc.ee, juhan.ernits@ttu.ee&lt;br /&gt;
&lt;br /&gt;
Skype: kairit.sirts&lt;br /&gt;
&lt;br /&gt;
The course is organised by [http://cs.ttu.ee the Department of Computer Science]. The course is supported by [http://studyitin.ee/ IT Academy].&lt;br /&gt;
&lt;br /&gt;
Students should also subscribe to the [http://lists.ttu.ee/mailman/listinfo/machine-learning machine learning mailing list].&lt;br /&gt;
This list is used to share information about the course this semester, as well as any other machine learning related events happening at TUT (now and in the future).&lt;br /&gt;
&lt;br /&gt;
Homework rankings based on results (just for fun): [[Meedia:Ranking.pdf|Ranking]]&lt;br /&gt;
&lt;br /&gt;
&amp;#039;&amp;#039;&amp;#039;NB!&amp;#039;&amp;#039;&amp;#039; No lecture on 18.04.2014. Instead of that, we will have a joint session for solving homework problems on Thursday 17.04 starting from 14:00 in ICT-411.&lt;br /&gt;
&lt;br /&gt;
== Assignments ==&lt;br /&gt;
[[Media:Hw1.pdf|First homework]] about decision trees is open in moodle. To submit it, you have to register [https://moodle.e-ope.ee/course/view.php?id=6504 for the course].&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw2.pdf|Second homework]] about KNN and K-means is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw3.pdf|Third homework]] about neural networks is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[https://www.dropbox.com/sh/50sioj7j8z7rwfn/s_iLJ6VlA0 Data] for the third homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw4.pdf|Fourth homework]] about linear and logistic regression is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Sbp.txt|Data]] for the fourth homework&lt;br /&gt;
&lt;br /&gt;
== Lecture 1: Introduction, decision trees ==&lt;br /&gt;
[[Media:lecture1.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Media:Dt_example.pdf|Example made in class]] - When to play tennis?&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch01.pdf Reading] - also contains the full algorithm for decision tree learning with a divide-and-conquer strategy.&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 2: K nearest neighbours ==&lt;br /&gt;
[[Meedia:Lecture2.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 3: K-means clustering, MLE principle ==&lt;br /&gt;
[[Meedia:Lecture3.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading I]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch13.pdf Reading II]&lt;br /&gt;
&lt;br /&gt;
== Lecture 4: Gaussian Mixture Model, EM algorithm ==&lt;br /&gt;
[[Meedia:Lecture4.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch14.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 5: History of neural networks, perceptron ==&lt;br /&gt;
[[Meedia:Lecture5.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch03.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 6: Artificial neural networks ==&lt;br /&gt;
[[Meedia:Lecture6.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Bp_math.pdf|Backpropagation notes]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch08.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 7: Linear regression ==&lt;br /&gt;
[[Meedia:Lecture7.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 8: Logistic regression ==&lt;br /&gt;
[[Meedia:Lecture8.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 9: Naive Bayes, maximum entropy model ==&lt;br /&gt;
[[Meedia:Lecture9.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes2.pdf Reading about Naive Bayes, section 2, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.jhu.edu/~jason/tutorials/loglin/#1 Tutorial about log-linear modeling by Jason Eisner]&lt;br /&gt;
&lt;br /&gt;
== Lecture 10: Sequence modeling ==&lt;br /&gt;
[[Meedia:Lecture10.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.ubc.ca/~murphyk/Bayes/rabiner.pdf Reading] - the classic paper on HMMs&lt;br /&gt;
&lt;br /&gt;
== Lecture 11: Dimensionality reduction - PCA ==&lt;br /&gt;
&lt;br /&gt;
== Lecture 12: Support vector machines ==&lt;br /&gt;
[[Meedia:Lecture12.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 1-4, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Lecture 13: SVM and kernels ==&lt;br /&gt;
[[Meedia:Lecture13.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Additional links ==&lt;br /&gt;
[[Meedia:Latex_example.pdf|LaTeX example]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Latex_example.tex|LaTeX example code]]&lt;br /&gt;
&lt;br /&gt;
[http://www.maths.tcd.ie/~dwilkins/LaTeXPrimer/ LaTeX tutorial]&lt;br /&gt;
&lt;br /&gt;
[http://arkitus.com/patterns-for-research-in-machine-learning/ Tips for scientific programming]&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:Lecture13.pdf&amp;diff=264</id>
		<title>Fail:Lecture13.pdf</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:Lecture13.pdf&amp;diff=264"/>
		<updated>2014-05-10T15:07:05Z</updated>

		<summary type="html">&lt;p&gt;Kairit: &lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:Lecture12.pdf&amp;diff=262</id>
		<title>Fail:Lecture12.pdf</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:Lecture12.pdf&amp;diff=262"/>
		<updated>2014-05-05T14:04:56Z</updated>

		<summary type="html">&lt;p&gt;Kairit: &lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=261</id>
		<title>Machine learning</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=261"/>
		<updated>2014-05-05T14:04:19Z</updated>

		<summary type="html">&lt;p&gt;Kairit: &lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;br /&gt;
Spring 2013/2014&lt;br /&gt;
&lt;br /&gt;
ITI8565: Machine learning&lt;br /&gt;
&lt;br /&gt;
Taught by: Kairit Sirts&lt;br /&gt;
&lt;br /&gt;
EAP: 6.0&lt;br /&gt;
&lt;br /&gt;
Time and place: Fridays&lt;br /&gt;
  Lectures: 16:00-17:30  X-406&lt;br /&gt;
  Labs: 17:45-19:15  X-412&lt;br /&gt;
&lt;br /&gt;
Additional information: sirts@ioc.ee, juhan.ernits@ttu.ee&lt;br /&gt;
&lt;br /&gt;
Skype: kairit.sirts&lt;br /&gt;
&lt;br /&gt;
The course is organised by [http://cs.ttu.ee the Department of Computer Science]. The course is supported by [http://studyitin.ee/ IT Academy].&lt;br /&gt;
&lt;br /&gt;
Students should also subscribe to the [http://lists.ttu.ee/mailman/listinfo/machine-learning machine learning mailing list].&lt;br /&gt;
This list is used to share information about the course this semester, as well as any other machine learning related events happening at TUT (now and in the future).&lt;br /&gt;
&lt;br /&gt;
Homework rankings based on results (just for fun): [[Meedia:Ranking.pdf|Ranking]]&lt;br /&gt;
&lt;br /&gt;
&amp;#039;&amp;#039;&amp;#039;NB!&amp;#039;&amp;#039;&amp;#039; No lecture on 18.04.2014. Instead of that, we will have a joint session for solving homework problems on Thursday 17.04 starting from 14:00 in ICT-411.&lt;br /&gt;
&lt;br /&gt;
== Assignments ==&lt;br /&gt;
[[Media:Hw1.pdf|First homework]] about decision trees is open in moodle. To submit it, you have to register [https://moodle.e-ope.ee/course/view.php?id=6504 for the course].&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw2.pdf|Second homework]] about KNN and K-means is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw3.pdf|Third homework]] about neural networks is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[https://www.dropbox.com/sh/50sioj7j8z7rwfn/s_iLJ6VlA0 Data] for the third homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw4.pdf|Fourth homework]] about linear and logistic regression is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Sbp.txt|Data]] for the fourth homework&lt;br /&gt;
&lt;br /&gt;
== Lecture 1: Introduction, decision trees ==&lt;br /&gt;
[[Media:lecture1.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Media:Dt_example.pdf|Example made in class]] - When to play tennis?&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch01.pdf Reading] - also contains the full algorithm for decision tree learning with a divide-and-conquer strategy.&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 2: K nearest neighbours ==&lt;br /&gt;
[[Meedia:Lecture2.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 3: K-means clustering, MLE principle ==&lt;br /&gt;
[[Meedia:Lecture3.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading I]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch13.pdf Reading II]&lt;br /&gt;
&lt;br /&gt;
== Lecture 4: Gaussian Mixture Model, EM algorithm ==&lt;br /&gt;
[[Meedia:Lecture4.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch14.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 5: History of neural networks, perceptron ==&lt;br /&gt;
[[Meedia:Lecture5.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch03.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 6: Artificial neural networks ==&lt;br /&gt;
[[Meedia:Lecture6.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Bp_math.pdf|Backpropagation notes]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch08.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 7: Linear regression ==&lt;br /&gt;
[[Meedia:Lecture7.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 8: Logistic regression ==&lt;br /&gt;
[[Meedia:Lecture8.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 9: Naive Bayes, maximum entropy model ==&lt;br /&gt;
[[Meedia:Lecture9.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes2.pdf Reading about Naive Bayes, section 2, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.jhu.edu/~jason/tutorials/loglin/#1 Tutorial about log-linear modeling by Jason Eisner]&lt;br /&gt;
&lt;br /&gt;
== Lecture 10: Sequence modeling ==&lt;br /&gt;
[[Meedia:Lecture10.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.ubc.ca/~murphyk/Bayes/rabiner.pdf Reading] - the classic paper on HMMs&lt;br /&gt;
&lt;br /&gt;
== Lecture 11: Dimensionality reduction - PCA ==&lt;br /&gt;
&lt;br /&gt;
== Lecture 12: Support vector machines ==&lt;br /&gt;
[[Meedia:Lecture12.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes3.pdf Reading, sections 1-4, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Additional links ==&lt;br /&gt;
[[Meedia:Latex_example.pdf|LaTeX example]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Latex_example.tex|LaTeX example code]]&lt;br /&gt;
&lt;br /&gt;
[http://www.maths.tcd.ie/~dwilkins/LaTeXPrimer/ LaTeX tutorial]&lt;br /&gt;
&lt;br /&gt;
[http://arkitus.com/patterns-for-research-in-machine-learning/ Tips for scientific programming]&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=260</id>
		<title>Fail:Ranking.pdf</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=260"/>
		<updated>2014-04-20T18:47:27Z</updated>

		<summary type="html">&lt;p&gt;Kairit: Kairit uploaded a new version of the file &amp;amp;quot;Pilt:Ranking.pdf&amp;amp;quot;&lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=259</id>
		<title>Fail:Ranking.pdf</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:Ranking.pdf&amp;diff=259"/>
		<updated>2014-04-17T13:02:49Z</updated>

		<summary type="html">&lt;p&gt;Kairit: Kairit uploaded a new version of the file &amp;amp;quot;Pilt:Ranking.pdf&amp;amp;quot;&lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=257</id>
		<title>Machine learning</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=257"/>
		<updated>2014-04-15T18:20:30Z</updated>

		<summary type="html">&lt;p&gt;Kairit: /* Lecture 10: Sequence modeling */&lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;br /&gt;
Spring 2013/2014&lt;br /&gt;
&lt;br /&gt;
ITI8565: Machine learning&lt;br /&gt;
&lt;br /&gt;
Taught by: Kairit Sirts&lt;br /&gt;
&lt;br /&gt;
EAP: 6.0&lt;br /&gt;
&lt;br /&gt;
Time and place: Fridays&lt;br /&gt;
  Lectures: 16:00-17:30  X-406&lt;br /&gt;
  Labs: 17:45-19:15  X-412&lt;br /&gt;
&lt;br /&gt;
Additional information: sirts@ioc.ee, juhan.ernits@ttu.ee&lt;br /&gt;
&lt;br /&gt;
Skype: kairit.sirts&lt;br /&gt;
&lt;br /&gt;
The course is organised by [http://cs.ttu.ee the Department of Computer Science]. The course is supported by [http://studyitin.ee/ IT Academy].&lt;br /&gt;
&lt;br /&gt;
Students should also subscribe to [http://lists.ttu.ee/mailman/listinfo/machine-learning machine learning list]. &lt;br /&gt;
This list is used to share information about the course this semester, as well as about any other machine-learning-related events happening at TUT, now and in the future.&lt;br /&gt;
&lt;br /&gt;
Homework rankings based on results (just for fun): [[Meedia:Ranking.pdf|Ranking]]&lt;br /&gt;
&lt;br /&gt;
&amp;#039;&amp;#039;&amp;#039;NB!&amp;#039;&amp;#039;&amp;#039; No lecture on 18.04.2014. Instead, we will have a joint session for solving homework problems on Thursday 17.04, starting at 14:00 in ICT-411.&lt;br /&gt;
&lt;br /&gt;
== Assignments ==&lt;br /&gt;
[[Media:Hw1.pdf|First homework]] about decision trees is open in moodle. To submit it, you have to register [https://moodle.e-ope.ee/course/view.php?id=6504 for the course].&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw2.pdf|Second homework]] about KNN and K-means is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw3.pdf|Third homework]] about neural networks is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[https://www.dropbox.com/sh/50sioj7j8z7rwfn/s_iLJ6VlA0 Data] for the third homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw4.pdf|Fourth homework]] about linear and logistic regression is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Sbp.txt|Data]] for the fourth homework&lt;br /&gt;
&lt;br /&gt;
== Lecture 1: Introduction, decision trees ==&lt;br /&gt;
[[Media:lecture1.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Media:Dt_example.pdf|Example made in class]] - When to play tennis?&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch01.pdf Reading] - also contains the full algorithm for decision tree learning with the divide-and-conquer strategy.&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 2: K nearest neighbours ==&lt;br /&gt;
[[Meedia:Lecture2.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 3: K-means clustering, MLE principle ==&lt;br /&gt;
[[Meedia:Lecture3.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading I]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch13.pdf Reading II]&lt;br /&gt;
&lt;br /&gt;
== Lecture 4: Gaussian Mixture Model, EM algorithm ==&lt;br /&gt;
[[Meedia:Lecture4.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch14.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 5: History of neural networks, perceptron ==&lt;br /&gt;
[[Meedia:Lecture5.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch03.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 6: Artificial neural networks ==&lt;br /&gt;
[[Meedia:Lecture6.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Bp_math.pdf|Backpropagation notes]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch08.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 7: Linear regression ==&lt;br /&gt;
[[Meedia:Lecture7.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 8: Logistic regression ==&lt;br /&gt;
[[Meedia:Lecture8.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 9: Naive Bayes, maximum entropy model ==&lt;br /&gt;
[[Meedia:Lecture9.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes2.pdf Reading about Naive Bayes, section 2, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.jhu.edu/~jason/tutorials/loglin/#1 Tutorial about log-linear modeling by Jason Eisner]&lt;br /&gt;
&lt;br /&gt;
== Lecture 10: Sequence modeling ==&lt;br /&gt;
[[Meedia:Lecture10.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.ubc.ca/~murphyk/Bayes/rabiner.pdf Reading] - the classic paper on HMMs&lt;br /&gt;
&lt;br /&gt;
== Additional links ==&lt;br /&gt;
[[Meedia:Latex_example.pdf|LaTeX example]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Latex_example.tex|LaTeX example code]]&lt;br /&gt;
&lt;br /&gt;
[http://www.maths.tcd.ie/~dwilkins/LaTeXPrimer/ LaTeX tutorial]&lt;br /&gt;
&lt;br /&gt;
[http://arkitus.com/patterns-for-research-in-machine-learning/ Tips for scientific programming]&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=256</id>
		<title>Machine learning</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=256"/>
		<updated>2014-04-15T17:34:08Z</updated>

		<summary type="html">&lt;p&gt;Kairit: &lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;br /&gt;
Spring 2013/2014&lt;br /&gt;
&lt;br /&gt;
ITI8565: Machine learning&lt;br /&gt;
&lt;br /&gt;
Taught by: Kairit Sirts&lt;br /&gt;
&lt;br /&gt;
EAP: 6.0&lt;br /&gt;
&lt;br /&gt;
Time and place: Fridays&lt;br /&gt;
  Lectures: 16:00-17:30  X-406&lt;br /&gt;
  Labs: 17:45-19:15  X-412&lt;br /&gt;
&lt;br /&gt;
Additional information: sirts@ioc.ee, juhan.ernits@ttu.ee&lt;br /&gt;
&lt;br /&gt;
Skype: kairit.sirts&lt;br /&gt;
&lt;br /&gt;
The course is organised by [http://cs.ttu.ee the Department of Computer Science]. The course is supported by [http://studyitin.ee/ IT Academy].&lt;br /&gt;
&lt;br /&gt;
Students should also subscribe to [http://lists.ttu.ee/mailman/listinfo/machine-learning machine learning list]. &lt;br /&gt;
This list is used to share information about the course this semester, as well as about any other machine-learning-related events happening at TUT, now and in the future.&lt;br /&gt;
&lt;br /&gt;
Homework rankings based on results (just for fun): [[Meedia:Ranking.pdf|Ranking]]&lt;br /&gt;
&lt;br /&gt;
&amp;#039;&amp;#039;&amp;#039;NB!&amp;#039;&amp;#039;&amp;#039; No lecture on 18.04.2014. Instead, we will have a joint session for solving homework problems on Thursday 17.04, starting at 14:00 in ICT-411.&lt;br /&gt;
&lt;br /&gt;
== Assignments ==&lt;br /&gt;
[[Media:Hw1.pdf|First homework]] about decision trees is open in moodle. To submit it, you have to register [https://moodle.e-ope.ee/course/view.php?id=6504 for the course].&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw2.pdf|Second homework]] about KNN and K-means is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw3.pdf|Third homework]] about neural networks is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[https://www.dropbox.com/sh/50sioj7j8z7rwfn/s_iLJ6VlA0 Data] for the third homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw4.pdf|Fourth homework]] about linear and logistic regression is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Sbp.txt|Data]] for the fourth homework&lt;br /&gt;
&lt;br /&gt;
== Lecture 1: Introduction, decision trees ==&lt;br /&gt;
[[Media:lecture1.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Media:Dt_example.pdf|Example made in class]] - When to play tennis?&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch01.pdf Reading] - also contains the full algorithm for decision tree learning with the divide-and-conquer strategy.&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 2: K nearest neighbours ==&lt;br /&gt;
[[Meedia:Lecture2.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 3: K-means clustering, MLE principle ==&lt;br /&gt;
[[Meedia:Lecture3.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading I]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch13.pdf Reading II]&lt;br /&gt;
&lt;br /&gt;
== Lecture 4: Gaussian Mixture Model, EM algorithm ==&lt;br /&gt;
[[Meedia:Lecture4.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch14.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 5: History of neural networks, perceptron ==&lt;br /&gt;
[[Meedia:Lecture5.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch03.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 6: Artificial neural networks ==&lt;br /&gt;
[[Meedia:Lecture6.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Bp_math.pdf|Backpropagation notes]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch08.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 7: Linear regression ==&lt;br /&gt;
[[Meedia:Lecture7.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 8: Logistic regression ==&lt;br /&gt;
[[Meedia:Lecture8.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 9: Naive Bayes, maximum entropy model ==&lt;br /&gt;
[[Meedia:Lecture9.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes2.pdf Reading about Naive Bayes, section 2, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.jhu.edu/~jason/tutorials/loglin/#1 Tutorial about log-linear modeling by Jason Eisner]&lt;br /&gt;
&lt;br /&gt;
== Lecture 10: Sequence modeling ==&lt;br /&gt;
[[Meedia:Lecture10.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Additional links ==&lt;br /&gt;
[[Meedia:Latex_example.pdf|LaTeX example]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Latex_example.tex|LaTeX example code]]&lt;br /&gt;
&lt;br /&gt;
[http://www.maths.tcd.ie/~dwilkins/LaTeXPrimer/ LaTeX tutorial]&lt;br /&gt;
&lt;br /&gt;
[http://arkitus.com/patterns-for-research-in-machine-learning/ Tips for scientific programming]&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:Sbp.txt&amp;diff=255</id>
		<title>Fail:Sbp.txt</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:Sbp.txt&amp;diff=255"/>
		<updated>2014-04-15T17:31:24Z</updated>

		<summary type="html">&lt;p&gt;Kairit: &lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:Hw4.pdf&amp;diff=254</id>
		<title>Fail:Hw4.pdf</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:Hw4.pdf&amp;diff=254"/>
		<updated>2014-04-15T17:30:50Z</updated>

		<summary type="html">&lt;p&gt;Kairit: &lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=253</id>
		<title>Machine learning</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=253"/>
		<updated>2014-04-15T17:30:13Z</updated>

		<summary type="html">&lt;p&gt;Kairit: /* Assignments */&lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;br /&gt;
Spring 2013/2014&lt;br /&gt;
&lt;br /&gt;
ITI8565: Machine learning&lt;br /&gt;
&lt;br /&gt;
Taught by: Kairit Sirts&lt;br /&gt;
&lt;br /&gt;
EAP: 6.0&lt;br /&gt;
&lt;br /&gt;
Time and place: Fridays&lt;br /&gt;
  Lectures: 16:00-17:30  X-406&lt;br /&gt;
  Labs: 17:45-19:15  X-412&lt;br /&gt;
&lt;br /&gt;
Additional information: sirts@ioc.ee, juhan.ernits@ttu.ee&lt;br /&gt;
&lt;br /&gt;
Skype: kairit.sirts&lt;br /&gt;
&lt;br /&gt;
The course is organised by [http://cs.ttu.ee the Department of Computer Science]. The course is supported by [http://studyitin.ee/ IT Academy].&lt;br /&gt;
&lt;br /&gt;
Students should also subscribe to [http://lists.ttu.ee/mailman/listinfo/machine-learning machine learning list]. &lt;br /&gt;
This list is used to share information about the course this semester, as well as about any other machine-learning-related events happening at TUT, now and in the future.&lt;br /&gt;
&lt;br /&gt;
&amp;#039;&amp;#039;&amp;#039;New!!!&amp;#039;&amp;#039;&amp;#039; Homework rankings based on results (just for fun): [[Meedia:Ranking.pdf|Ranking]]&lt;br /&gt;
This will be updated as the homework results are checked. Stay tuned!&lt;br /&gt;
&lt;br /&gt;
== Assignments ==&lt;br /&gt;
[[Media:Hw1.pdf|First homework]] about decision trees is open in moodle. To submit it, you have to register [https://moodle.e-ope.ee/course/view.php?id=6504 for the course].&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw2.pdf|Second homework]] about KNN and K-means is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw3.pdf|Third homework]] about neural networks is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[https://www.dropbox.com/sh/50sioj7j8z7rwfn/s_iLJ6VlA0 Data] for the third homework&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw4.pdf|Fourth homework]] about linear and logistic regression is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Sbp.txt|Data]] for the fourth homework&lt;br /&gt;
&lt;br /&gt;
== Lecture 1: Introduction, decision trees ==&lt;br /&gt;
[[Media:lecture1.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Media:Dt_example.pdf|Example made in class]] - When to play tennis?&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch01.pdf Reading] - also contains the full algorithm for decision tree learning with the divide-and-conquer strategy.&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 2: K nearest neighbours ==&lt;br /&gt;
[[Meedia:Lecture2.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 3: K-means clustering, MLE principle ==&lt;br /&gt;
[[Meedia:Lecture3.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading I]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch13.pdf Reading II]&lt;br /&gt;
&lt;br /&gt;
== Lecture 4: Gaussian Mixture Model, EM algorithm ==&lt;br /&gt;
[[Meedia:Lecture4.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch14.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 5: History of neural networks, perceptron ==&lt;br /&gt;
[[Meedia:Lecture5.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch03.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 6: Artificial neural networks ==&lt;br /&gt;
[[Meedia:Lecture6.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Bp_math.pdf|Backpropagation notes]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch08.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 7: Linear regression ==&lt;br /&gt;
[[Meedia:Lecture7.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 8: Logistic regression ==&lt;br /&gt;
[[Meedia:Lecture8.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 9: Naive Bayes, maximum entropy model ==&lt;br /&gt;
[[Meedia:Lecture9.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes2.pdf Reading about Naive Bayes, section 2, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.jhu.edu/~jason/tutorials/loglin/#1 Tutorial about log-linear modeling by Jason Eisner]&lt;br /&gt;
&lt;br /&gt;
== Lecture 10: Sequence modeling ==&lt;br /&gt;
[[Meedia:Lecture10.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Additional links ==&lt;br /&gt;
[[Meedia:Latex_example.pdf|LaTeX example]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Latex_example.tex|LaTeX example code]]&lt;br /&gt;
&lt;br /&gt;
[http://www.maths.tcd.ie/~dwilkins/LaTeXPrimer/ LaTeX tutorial]&lt;br /&gt;
&lt;br /&gt;
[http://arkitus.com/patterns-for-research-in-machine-learning/ Tips for scientific programming]&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=252</id>
		<title>Machine learning</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=252"/>
		<updated>2014-04-15T17:27:55Z</updated>

		<summary type="html">&lt;p&gt;Kairit: &lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;br /&gt;
Spring 2013/2014&lt;br /&gt;
&lt;br /&gt;
ITI8565: Machine learning&lt;br /&gt;
&lt;br /&gt;
Taught by: Kairit Sirts&lt;br /&gt;
&lt;br /&gt;
EAP: 6.0&lt;br /&gt;
&lt;br /&gt;
Time and place: Fridays&lt;br /&gt;
  Lectures: 16:00-17:30  X-406&lt;br /&gt;
  Labs: 17:45-19:15  X-412&lt;br /&gt;
&lt;br /&gt;
Additional information: sirts@ioc.ee, juhan.ernits@ttu.ee&lt;br /&gt;
&lt;br /&gt;
Skype: kairit.sirts&lt;br /&gt;
&lt;br /&gt;
The course is organised by [http://cs.ttu.ee the Department of Computer Science]. The course is supported by [http://studyitin.ee/ IT Academy].&lt;br /&gt;
&lt;br /&gt;
Students should also subscribe to [http://lists.ttu.ee/mailman/listinfo/machine-learning machine learning list]. &lt;br /&gt;
This list is used to share information about the course this semester, as well as about any other machine-learning-related events happening at TUT, now and in the future.&lt;br /&gt;
&lt;br /&gt;
&amp;#039;&amp;#039;&amp;#039;New!!!&amp;#039;&amp;#039;&amp;#039; Homework rankings based on results (just for fun): [[Meedia:Ranking.pdf|Ranking]]&lt;br /&gt;
This will be updated as the homework results are checked. Stay tuned!&lt;br /&gt;
&lt;br /&gt;
== Assignments ==&lt;br /&gt;
[[Media:Hw1.pdf|First homework]] is open in moodle. To submit it, you have to register [https://moodle.e-ope.ee/course/view.php?id=6504 for the course].&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw2.pdf|Second homework]] is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw3.pdf|Third homework]] is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[https://www.dropbox.com/sh/50sioj7j8z7rwfn/s_iLJ6VlA0 Data] for the third homework&lt;br /&gt;
&lt;br /&gt;
== Lecture 1: Introduction, decision trees ==&lt;br /&gt;
[[Media:lecture1.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Media:Dt_example.pdf|Example made in class]] - When to play tennis?&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch01.pdf Reading] - also contains the full algorithm for decision tree learning with the divide-and-conquer strategy.&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 2: K nearest neighbours ==&lt;br /&gt;
[[Meedia:Lecture2.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 3: K-means clustering, MLE principle ==&lt;br /&gt;
[[Meedia:Lecture3.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading I]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch13.pdf Reading II]&lt;br /&gt;
&lt;br /&gt;
== Lecture 4: Gaussian Mixture Model, EM algorithm ==&lt;br /&gt;
[[Meedia:Lecture4.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch14.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 5: History of neural networks, perceptron ==&lt;br /&gt;
[[Meedia:Lecture5.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch03.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 6: Artificial neural networks ==&lt;br /&gt;
[[Meedia:Lecture6.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Bp_math.pdf|Backpropagation notes]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch08.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 7: Linear regression ==&lt;br /&gt;
[[Meedia:Lecture7.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 8: Logistic regression ==&lt;br /&gt;
[[Meedia:Lecture8.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 9: Naive Bayes, maximum entropy model ==&lt;br /&gt;
[[Meedia:Lecture9.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes2.pdf Reading about Naive Bayes, section 2, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.jhu.edu/~jason/tutorials/loglin/#1 Tutorial about log-linear modeling by Jason Eisner]&lt;br /&gt;
&lt;br /&gt;
== Lecture 10: Sequence modeling ==&lt;br /&gt;
[[Meedia:Lecture10.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Additional links ==&lt;br /&gt;
[[Meedia:Latex_example.pdf|LaTeX example]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Latex_example.tex|LaTeX example code]]&lt;br /&gt;
&lt;br /&gt;
[http://www.maths.tcd.ie/~dwilkins/LaTeXPrimer/ LaTeX tutorial]&lt;br /&gt;
&lt;br /&gt;
[http://arkitus.com/patterns-for-research-in-machine-learning/ Tips for scientific programming]&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Fail:Lecture10.pdf&amp;diff=251</id>
		<title>Fail:Lecture10.pdf</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Fail:Lecture10.pdf&amp;diff=251"/>
		<updated>2014-04-15T15:09:34Z</updated>

		<summary type="html">&lt;p&gt;Kairit: &lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=250</id>
		<title>Machine learning</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=250"/>
		<updated>2014-04-15T15:08:51Z</updated>

		<summary type="html">&lt;p&gt;Kairit: &lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;br /&gt;
Spring 2013/2014&lt;br /&gt;
&lt;br /&gt;
ITI8565: Machine learning&lt;br /&gt;
&lt;br /&gt;
Taught by: Kairit Sirts&lt;br /&gt;
&lt;br /&gt;
EAP: 6.0&lt;br /&gt;
&lt;br /&gt;
Time and place: Fridays&lt;br /&gt;
  Lectures: 16:00-17:30  X-406&lt;br /&gt;
  Labs: 17:45-19:15  X-412&lt;br /&gt;
&lt;br /&gt;
Additional information: sirts@ioc.ee, juhan.ernits@ttu.ee&lt;br /&gt;
&lt;br /&gt;
Skype: kairit.sirts&lt;br /&gt;
&lt;br /&gt;
The course is organised by [http://cs.ttu.ee the Department of Computer Science]. The course is supported by [http://studyitin.ee/ IT Academy].&lt;br /&gt;
&lt;br /&gt;
Students should also subscribe to [http://lists.ttu.ee/mailman/listinfo/machine-learning machine learning list]. &lt;br /&gt;
This list is used to share information about the course this semester, as well as about any other machine-learning-related events happening at TUT, now and in the future.&lt;br /&gt;
&lt;br /&gt;
&amp;#039;&amp;#039;&amp;#039;New!!!&amp;#039;&amp;#039;&amp;#039; Homework rankings based on results (just for fun): [[Meedia:Ranking.pdf|Ranking]]&lt;br /&gt;
This will be updated as the homework results are checked. Stay tuned!&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 1: Introduction, decision trees ==&lt;br /&gt;
[[Media:lecture1.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Media:Dt_example.pdf|Example made in class]] - When to play tennis?&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch01.pdf Reading] - also contains the full algorithm for decision tree learning with the divide-and-conquer strategy.&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw1.pdf|First homework]] is open in moodle. To submit it, you have to register [https://moodle.e-ope.ee/course/view.php?id=6504 for the course].&lt;br /&gt;
&lt;br /&gt;
== Lecture 2: K nearest neighbours ==&lt;br /&gt;
[[Meedia:Lecture2.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 3: K-means clustering, MLE principle ==&lt;br /&gt;
[[Meedia:Lecture3.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading I]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch13.pdf Reading II]&lt;br /&gt;
&lt;br /&gt;
== Lecture 4: Gaussian Mixture Model, EM algorithm ==&lt;br /&gt;
[[Meedia:Lecture4.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch14.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw2.pdf|Second homework]] is open in moodle.&lt;br /&gt;
&lt;br /&gt;
== Lecture 5: History of neural networks, perceptron ==&lt;br /&gt;
[[Meedia:Lecture5.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch03.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 6: Artificial neural networks ==&lt;br /&gt;
[[Meedia:Lecture6.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Bp_math.pdf|Backpropagation notes]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch08.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw3.pdf|Third homework]] is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[https://www.dropbox.com/sh/50sioj7j8z7rwfn/s_iLJ6VlA0 Data] for the third homework&lt;br /&gt;
&lt;br /&gt;
== Lecture 7: Linear regression ==&lt;br /&gt;
[[Meedia:Lecture7.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 8: Logistic regression ==&lt;br /&gt;
[[Meedia:Lecture8.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 9: Naive Bayes, maximum entropy model ==&lt;br /&gt;
[[Meedia:Lecture9.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes2.pdf Reading about Naive Bayes, section 2, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.jhu.edu/~jason/tutorials/loglin/#1 Tutorial about log-linear modeling by Jason Eisner]&lt;br /&gt;
&lt;br /&gt;
== Lecture 10: Sequence modeling ==&lt;br /&gt;
[[Meedia:Lecture10.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Additional links ==&lt;br /&gt;
[[Meedia:Latex_example.pdf|LaTeX example]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Latex_example.tex|LaTeX example code]]&lt;br /&gt;
&lt;br /&gt;
[http://www.maths.tcd.ie/~dwilkins/LaTeXPrimer/ LaTeX tutorial]&lt;br /&gt;
&lt;br /&gt;
[http://arkitus.com/patterns-for-research-in-machine-learning/ Tips for scientific programming]&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=249</id>
		<title>Machine learning</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=249"/>
		<updated>2014-04-09T14:43:26Z</updated>

		<summary type="html">&lt;p&gt;Kairit: &lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;br /&gt;
Spring 2013/2014&lt;br /&gt;
&lt;br /&gt;
ITI8565: Machine learning&lt;br /&gt;
&lt;br /&gt;
Taught by: Kairit Sirts&lt;br /&gt;
&lt;br /&gt;
EAP: 6.0&lt;br /&gt;
&lt;br /&gt;
Time and place: Fridays&lt;br /&gt;
  Lectures: 16:00-17:30  X-406&lt;br /&gt;
  Labs: 17:45-19:15  X-412&lt;br /&gt;
&lt;br /&gt;
Additional information: sirts@ioc.ee, juhan.ernits@ttu.ee&lt;br /&gt;
&lt;br /&gt;
Skype: kairit.sirts&lt;br /&gt;
&lt;br /&gt;
The course is organised by [http://cs.ttu.ee the Department of Computer Science]. The course is supported by [http://studyitin.ee/ IT Academy].&lt;br /&gt;
&lt;br /&gt;
Students should also subscribe to [http://lists.ttu.ee/mailman/listinfo/machine-learning machine learning list]. &lt;br /&gt;
This list is used to share information about the course this semester, as well as about any other machine-learning-related events happening at TUT, now and in the future.&lt;br /&gt;
&lt;br /&gt;
&amp;#039;&amp;#039;&amp;#039;New!!!&amp;#039;&amp;#039;&amp;#039; Homework rankings based on results (just for fun): [[Meedia:Ranking.pdf|Ranking]]&lt;br /&gt;
This will be updated as the homework results are checked. Stay tuned!&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 1: Introduction, decision trees ==&lt;br /&gt;
[[Media:lecture1.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Media:Dt_example.pdf|Example made in class]] - When to play tennis?&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch01.pdf Reading] - also contains the full algorithm for decision tree learning with the divide-and-conquer strategy.&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw1.pdf|First homework]] is open in moodle. To submit it, you have to register [https://moodle.e-ope.ee/course/view.php?id=6504 for the course].&lt;br /&gt;
&lt;br /&gt;
== Lecture 2: K nearest neighbours ==&lt;br /&gt;
[[Meedia:Lecture2.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 3: K-means clustering, MLE principle ==&lt;br /&gt;
[[Meedia:Lecture3.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading I]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch13.pdf Reading II]&lt;br /&gt;
&lt;br /&gt;
== Lecture 4: Gaussian Mixture Model, EM algorithm ==&lt;br /&gt;
[[Meedia:Lecture4.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch14.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw2.pdf|Second homework]] is open in moodle.&lt;br /&gt;
&lt;br /&gt;
== Lecture 5: History of neural networks, perceptron ==&lt;br /&gt;
[[Meedia:Lecture5.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch03.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 6: Artificial neural networks ==&lt;br /&gt;
[[Meedia:Lecture6.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Bp_math.pdf|Backpropagation notes]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch08.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw3.pdf|Third homework]] is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[https://www.dropbox.com/sh/50sioj7j8z7rwfn/s_iLJ6VlA0 Data] for the third homework&lt;br /&gt;
&lt;br /&gt;
== Lecture 7: Linear regression ==&lt;br /&gt;
[[Meedia:Lecture7.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 8: Logistic regression ==&lt;br /&gt;
[[Meedia:Lecture8.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 9: Naive Bayes, maximum entropy model ==&lt;br /&gt;
[[Meedia:Lecture9.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes2.pdf Reading about Naive Bayes, section 2, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
[http://www.cs.jhu.edu/~jason/tutorials/loglin/#1 Tutorial about log-linear modeling by Jason Eisner]&lt;br /&gt;
&lt;br /&gt;
== Additional links ==&lt;br /&gt;
[[Meedia:Latex_example.pdf|LaTeX example]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Latex_example.tex|LaTeX example code]]&lt;br /&gt;
&lt;br /&gt;
[http://www.maths.tcd.ie/~dwilkins/LaTeXPrimer/ LaTeX tutorial]&lt;br /&gt;
&lt;br /&gt;
[http://arkitus.com/patterns-for-research-in-machine-learning/ Tips for scientific programming]&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
	<entry>
		<id>http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=248</id>
		<title>Machine learning</title>
		<link rel="alternate" type="text/html" href="http://courses.cs.taltech.ee/w/index.php?title=Machine_learning&amp;diff=248"/>
		<updated>2014-04-09T14:35:45Z</updated>

		<summary type="html">&lt;p&gt;Kairit: &lt;/p&gt;
&lt;hr /&gt;
&lt;div&gt;&lt;br /&gt;
Spring 2013/2014&lt;br /&gt;
&lt;br /&gt;
ITI8565: Machine learning&lt;br /&gt;
&lt;br /&gt;
Taught by: Kairit Sirts&lt;br /&gt;
&lt;br /&gt;
EAP: 6.0&lt;br /&gt;
&lt;br /&gt;
Time and place: Fridays&lt;br /&gt;
  Lectures: 16:00-17:30  X-406&lt;br /&gt;
  Labs: 17:45-19:15  X-412&lt;br /&gt;
&lt;br /&gt;
Additional information: sirts@ioc.ee, juhan.ernits@ttu.ee&lt;br /&gt;
&lt;br /&gt;
Skype: kairit.sirts&lt;br /&gt;
&lt;br /&gt;
The course is organised by [http://cs.ttu.ee the Department of Computer Science]. The course is supported by [http://studyitin.ee/ IT Academy].&lt;br /&gt;
&lt;br /&gt;
Students should also subscribe to [http://lists.ttu.ee/mailman/listinfo/machine-learning machine learning list]. &lt;br /&gt;
This list is used to share information about the course this semester, as well as about any other machine-learning-related events happening at TUT, now and in the future.&lt;br /&gt;
&lt;br /&gt;
&amp;#039;&amp;#039;&amp;#039;New!!!&amp;#039;&amp;#039;&amp;#039; Homework rankings based on results (just for fun): [[Meedia:Ranking.pdf|Ranking]]&lt;br /&gt;
This will be updated as the homework results are checked. Stay tuned!&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
== Lecture 1: Introduction, decision trees ==&lt;br /&gt;
[[Media:lecture1.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Media:Dt_example.pdf|Example made in class]] - When to play tennis?&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch01.pdf Reading] - also contains the full algorithm for decision tree learning with the divide-and-conquer strategy.&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw1.pdf|First homework]] is open in moodle. To submit it, you have to register [https://moodle.e-ope.ee/course/view.php?id=6504 for the course].&lt;br /&gt;
&lt;br /&gt;
== Lecture 2: K nearest neighbours ==&lt;br /&gt;
[[Meedia:Lecture2.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 3: K-means clustering, MLE principle ==&lt;br /&gt;
[[Meedia:Lecture3.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch02.pdf Reading I]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch13.pdf Reading II]&lt;br /&gt;
&lt;br /&gt;
== Lecture 4: Gaussian Mixture Model, EM algorithm ==&lt;br /&gt;
[[Meedia:Lecture4.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch14.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw2.pdf|Second homework]] is open in moodle.&lt;br /&gt;
&lt;br /&gt;
== Lecture 5: History of neural networks, perceptron ==&lt;br /&gt;
[[Meedia:Lecture5.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch03.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
== Lecture 6: Artificial neural networks ==&lt;br /&gt;
[[Meedia:Lecture6.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Bp_math.pdf|Backpropagation notes]]&lt;br /&gt;
&lt;br /&gt;
[http://ciml.info/dl/v0_8/ciml-v0_8-ch08.pdf Reading]&lt;br /&gt;
&lt;br /&gt;
[[Media:Hw3.pdf|Third homework]] is open in moodle.&lt;br /&gt;
&lt;br /&gt;
[https://www.dropbox.com/sh/50sioj7j8z7rwfn/s_iLJ6VlA0 Data] for the third homework&lt;br /&gt;
&lt;br /&gt;
== Lecture 7: Linear regression ==&lt;br /&gt;
[[Meedia:Lecture7.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 8: Logistic regression ==&lt;br /&gt;
[[Meedia:Lecture8.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
== Lecture 9: Naive Bayes, maximum entropy model ==&lt;br /&gt;
[[Meedia:Lecture9.pdf|Slides]]&lt;br /&gt;
&lt;br /&gt;
[http://see.stanford.edu/materials/aimlcs229/cs229-notes2.pdf Reading about Naive Bayes, section 2, lecture notes by Andrew Ng]&lt;br /&gt;
&lt;br /&gt;
== Additional links ==&lt;br /&gt;
[[Meedia:Latex_example.pdf|LaTeX example]]&lt;br /&gt;
&lt;br /&gt;
[[Meedia:Latex_example.tex|LaTeX example code]]&lt;br /&gt;
&lt;br /&gt;
[http://www.maths.tcd.ie/~dwilkins/LaTeXPrimer/ LaTeX tutorial]&lt;br /&gt;
&lt;br /&gt;
[http://arkitus.com/patterns-for-research-in-machine-learning/ Tips for scientific programming]&lt;/div&gt;</summary>
		<author><name>Kairit</name></author>
	</entry>
</feed>