#!/usr/bin/env python
# coding: utf-8

# # Artificial Intelligence
# ## L2 International, Univ. Bordeaux
# ### Lab #2, Unsupervised Learning (3)
#
# In this lab, we will consider hierarchical clustering.
#
# This lab is an adaptation of:
#
# https://stackabuse.com/hierarchical-clustering-with-python-and-scikit-learn/

# ### Example 1. An introduction
#
# In this first example, we will simply use a small dataset: a set of points that we define manually.

# 1. Uncomment and execute the following code.

# import numpy as np
# X = np.array([[5, 3],
#               [10, 15],
#               [15, 12],
#               [24, 10],
#               [30, 30],
#               [85, 70],
#               [71, 80],
#               [60, 78],
#               [70, 55],
#               [80, 91]])

# 2. Let's plot the above data points. To do so, uncomment and execute the following code:

# import matplotlib.pyplot as plt
# get_ipython().run_line_magic('matplotlib', 'inline')
# labels = range(1, 11)
# plt.figure(figsize=(10, 7))
# plt.subplots_adjust(bottom=0.1)
# plt.scatter(X[:, 0], X[:, 1], label='True Position')
# for label, x, y in zip(labels, X[:, 0], X[:, 1]):
#     plt.annotate(
#         label,
#         xy=(x, y), xytext=(-3, 3),
#         textcoords='offset points', ha='right', va='bottom')
# plt.show()

# Dendrograms (see https://en.wikipedia.org/wiki/Dendrogram for more information)
# summarise how points are merged, step by step, into larger and larger clusters.
# Uncomment and execute the following code to see how this tool can give us an
# idea of how our data can be clustered.

# from scipy.cluster.hierarchy import dendrogram, linkage
# from matplotlib import pyplot as plt
# linked = linkage(X, 'single')
# labelList = list(range(1, 11))  # dendrogram expects a list of leaf labels
# plt.figure(figsize=(10, 7))
# dendrogram(linked,
#            orientation='top',
#            labels=labelList,
#            distance_sort='descending',
#            show_leaf_counts=True)
# plt.show()

# 1. Add the following code in a suitable place (inside the dendrogram cell,
#    before plt.show()), so that a horizontal cut is drawn across the dendrogram:

# hline = np.array([[x, 35] for x in range(10, 80)])
# plt.scatter(hline[:,0], hline[:,1], color='red')

# 2. Draw another line further down, to get more clusters (a sketch is given in
#    the appendix at the end of this handout).

# #### Hierarchical Clustering using sklearn
#
# 1. Uncomment and execute the following code:

# from sklearn.cluster import AgglomerativeClustering
# # On scikit-learn >= 1.2 the parameter is called `metric`; on older versions
# # it is `affinity` (removed in 1.4).
# cluster = AgglomerativeClustering(n_clusters=4, metric='euclidean', linkage='ward')
# cluster.fit_predict(X)

# 2. Print the labels (sketch in the appendix).

# 3. Uncomment and execute the following code to plot the clusters:

# plt.scatter(X[:,0], X[:,1], c=cluster.labels_, cmap='rainbow')

# ### Example 2. Hierarchical Clustering on Real Data
#
# The dataset is available at the following address:
#
# https://www.labri.fr/~zemmari/datasets/shopping-data.csv

# 1. Write Python instructions to open the dataset and visualise its content
#    (sketch in the appendix).

# 2. Uncomment and execute the following script to keep only the two columns we
#    will cluster on, 'Annual Income (k$)' and 'Spending Score (1-100)'
#    (here `customer_data` is the DataFrame loaded in step 1; adapt the name if
#    yours differs):

# data = customer_data[['Annual Income (k$)', 'Spending Score (1-100)']]

# 3. Uncomment and execute the following code to obtain the dendrogram of this dataset.

# import scipy.cluster.hierarchy as shc
# plt.figure(figsize=(10, 7))
# plt.title("Customer Dendrograms")
# dend = shc.dendrogram(shc.linkage(data, method='ward'))

# 4. Add instructions to draw a horizontal line through the longest vertical
#    distance that no horizontal merge line crosses. How many clusters do you
#    expect? (Sketch in the appendix.)

# 5. Use agglomerative clustering (AgglomerativeClustering) to create the
#    clusters and plot them (sketch in the appendix).

# 6. Give an interpretation of the result.
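# ### Appendix: solution sketches
#
# The sketches below are one possible way to complete the numbered exercises;
# they are not the only valid answers. Variable names that do not appear in the
# handout (e.g. `customer_data`) are assumptions. Uncomment a sketch to run it.

# Sketch for Example 1, "Print the labels": `fit_predict` stores the cluster
# index of each point in `cluster.labels_`.

# print(cluster.labels_)
# # One label (0..3) per row of X; the five bottom-left points and the five
# # top-right points should not share any label.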
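# Sketch for Example 1, "Draw another line further down": cutting the
# dendrogram at a lower height (here y = 15, an arbitrary choice) crosses more
# vertical branches, hence yields more clusters.

# hline2 = np.array([[x, 15] for x in range(10, 80)])
# plt.scatter(hline2[:,0], hline2[:,1], color='green')
# # or, more simply, a line that spans the whole axis:
# # plt.axhline(y=15, color='green')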
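# Sketch for Example 2, step 1, assuming pandas is installed; `read_csv` can
# load the file directly from the URL (or from a local copy).

# import pandas as pd
# customer_data = pd.read_csv('https://www.labri.fr/~zemmari/datasets/shopping-data.csv')
# print(customer_data.shape)   # number of rows and columns
# print(customer_data.head())  # first five rows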
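# Sketch for Example 2, step 4: `plt.axhline` draws the cut. The height used
# here (y = 200) is an assumption -- choose a height that crosses the longest
# vertical stretch of your dendrogram that no horizontal merge line interrupts.
# The number of vertical lines the cut crosses is the number of clusters to
# expect (five is the usual answer for this dataset with Ward linkage).

# import scipy.cluster.hierarchy as shc
# plt.figure(figsize=(10, 7))
# plt.title("Customer Dendrograms")
# dend = shc.dendrogram(shc.linkage(data, method='ward'))
# plt.axhline(y=200, color='red', linestyle='--')
# plt.show()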
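# Sketch for Example 2, step 5, assuming five clusters as read off the
# dendrogram (on scikit-learn < 1.2, replace `metric` with `affinity`):

# from sklearn.cluster import AgglomerativeClustering
# cluster = AgglomerativeClustering(n_clusters=5, metric='euclidean', linkage='ward')
# labels = cluster.fit_predict(data)
# plt.figure(figsize=(10, 7))
# plt.scatter(data['Annual Income (k$)'], data['Spending Score (1-100)'],
#             c=labels, cmap='rainbow')
# plt.xlabel('Annual Income (k$)')
# plt.ylabel('Spending Score (1-100)')
# plt.show()

# For step 6, each cluster can be read as a customer segment, e.g. customers
# with high income but a low spending score versus customers with both high
# income and a high spending score.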