# 一、推荐算法概述

• 基于内容的推荐(Content-Based Recommendation)
• 协同过滤的推荐(Collaborative Filtering Recommendation)
• 基于关联规则的推荐(Association Rule-Based Recommendation)
• 基于效用的推荐(Utility-Based Recommendation)
• 基于知识的推荐(Knowledge-Based Recommendation)
• 组合推荐(Hybrid Recommendation)

# 二、基于矩阵分解的推荐算法

## 2.3、程序实现

#!/usr/bin/env python
'''
Matrix-factorization recommendation: data loading.

Date:20160411
@author: zhaozhiyong
'''
from numpy import *


def load_data(path):
    '''Load a tab-separated rating matrix from *path*.

    Each line of the file is one row of the matrix; a "-" cell marks a
    missing rating and is stored as 0.0.

    :param path: path to the data file
    :return: the matrix as a list of lists of floats
    '''
    # NOTE(review): the scraped original lost the `def` line and the
    # per-line loop (`line` was referenced but never defined); both are
    # reconstructed here.
    data = []
    f = open(path)
    try:
        for line in f.readlines():
            arr = []
            lines = line.strip().split("\t")
            for x in lines:
                if x != "-":
                    arr.append(float(x))
                else:
                    # missing rating -> 0.0 (treated as unobserved downstream)
                    arr.append(float(0))
            data.append(arr)
    finally:
        f.close()  # the original never closed the file handle
    return data

def gradAscent(data, K, alpha=0.0002, beta=0.02, maxCycles=10000):
    '''Factor a rating matrix into P (m x K) and Q (K x n) by
    regularized stochastic gradient descent.

    NOTE(review): the scraped original lost its `def` line; the
    signature is reconstructed, with the previously hard-coded
    hyper-parameters exposed as keyword arguments keeping the
    original values as defaults.

    :param data: m x n rating matrix; entries <= 0 are treated as missing
    :param K: number of latent factors
    :param alpha: learning rate
    :param beta: L2 regularization weight
    :param maxCycles: maximum number of sweeps over the matrix
    :return: the factor matrices (p, q) as numpy matrices
    '''
    dataMat = mat(data)
    m, n = shape(dataMat)
    # random initial factors in [0, 1)
    p = mat(random.random((m, K)))
    q = mat(random.random((K, n)))

    for step in range(maxCycles):
        # one SGD sweep over the observed (positive) entries
        for i in range(m):
            for j in range(n):
                if dataMat[i, j] > 0:
                    error = dataMat[i, j]
                    for k in range(K):
                        error = error - p[i, k] * q[k, j]
                    for k in range(K):
                        p[i, k] = p[i, k] + alpha * (2 * error * q[k, j] - beta * p[i, k])
                        q[k, j] = q[k, j] + alpha * (2 * error * p[i, k] - beta * q[k, j])

        # regularized squared-error loss over the observed entries
        loss = 0.0
        for i in range(m):
            for j in range(n):
                if dataMat[i, j] > 0:
                    error = 0.0
                    for k in range(K):
                        error = error + p[i, k] * q[k, j]
                    # BUG FIX: the original assigned (loss = ...) instead of
                    # accumulating, so the stop test only saw the last entry.
                    loss = loss + (dataMat[i, j] - error) * (dataMat[i, j] - error)
                    for k in range(K):
                        loss = loss + beta * (p[i, k] * p[i, k] + q[k, j] * q[k, j]) / 2

        if loss < 0.001:
            break
        if step % 1000 == 0:
            # print(...) form so the file also runs under Python 3
            print(loss)

    return p, q

if __name__ == "__main__":
    # NOTE(review): the scraped original had lost its indentation and
    # used p and q that were never assigned in this scope; reconstructed
    # as the natural driver — load the ratings, factor them with K = 5
    # latent features, and print the reconstructed (filled-in) matrix.
    data = load_data("./data")  # TODO confirm the data-file path
    p, q = gradAscent(data, 5)
    result = p * q
    print(result)

'''
Plot the convergence (loss) curve saved by the training script.

Date:20160411
@author: zhaozhiyong
'''

from pylab import *
from numpy import *

# NOTE(review): the scraped original lost the read loop (`line` was
# never defined), appended raw strings to the plot data, and mixed
# pylab star-imports with an undefined `plt` name. Reconstructed:
# the file "result" holds one loss value per line.
data = []
f = open("result")
for line in f.readlines():
    data.append(float(line.strip()))
f.close()

n = len(data)
x = range(n)
plot(x, data, color='r', linewidth=3)
title('Convergence curve')
xlabel('generation')
ylabel('loss')
show()

# 参考文献

292 篇文章59 人订阅

0 条评论

## 相关文章

2768

### 学界 | Ian Goodfellow牵头举办NIPS机器学习对抗赛，提升系统鲁棒性

via pulse2 每年年底举办的 NIPS 都是令学者为之一振的顶级学术盛会。AI 科技评论了解到，今年首次增加了一个新议程，即「NIPS 2017 Com...

3436

5262

2.3K9

1692

3855

3469

1225

3325

1152