brandblox

RAGE engine

Apr 27th, 2024 (edited)

# Mean, variance and standard deviation using manual formulas
import math

def get_input():
    size = int(input("Enter length of array: "))
    num_data = []
    for i in range(size):
        number = int(input("Enter element {}: ".format(i + 1)))
        num_data.append(number)
    return num_data

def mean(data):
    return sum(data) / len(data)

def variance(data):
    # population variance: mean squared deviation from the mean (divide by n)
    mu = mean(data)
    return sum((x - mu) ** 2 for x in data) / len(data)

def std_deviation(data):
    return math.sqrt(variance(data))

data = get_input()
print("Mean:", mean(data))
print("Variance:", variance(data))
print("Standard Deviation:", std_deviation(data))

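# Optional cross-check (a sketch, not part of the original paste): Python's statistics
# module provides population versions of the same formulas, so results should match.
import statistics

_sample = [2, 4, 4, 4, 5, 5, 7, 9]  # assumed example values
print("statistics.mean:", statistics.mean(_sample))            # 5.0
print("statistics.pvariance:", statistics.pvariance(_sample))  # 4.0
print("statistics.pstdev:", statistics.pstdev(_sample))        # 2.0
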
# Using the NumPy library
import numpy as np

def get_input():
    size = int(input("Enter length of array: "))
    num_data = []
    for i in range(size):
        number = int(input("Enter element {}: ".format(i + 1)))
        num_data.append(number)
    return num_data

def mean(data):
    return np.mean(data)

def variance(data):
    return np.var(data)

def std_deviation(data):
    return np.std(data)

data = get_input()
print("Mean:", mean(data))
print("Variance:", variance(data))
print("Standard Deviation:", std_deviation(data))
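
# Note (an addition, not in the original paste): np.var and np.std default to the
# population formulas (ddof=0), matching the manual code above; pass ddof=1 for the
# Bessel-corrected sample estimates.
_arr = np.array([2, 4, 4, 4, 5, 5, 7, 9])  # assumed example values
print(np.var(_arr), np.var(_arr, ddof=1))  # 4.0 vs. ~4.571
print(np.std(_arr), np.std(_arr, ddof=1))  # 2.0 vs. ~2.138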

################################ Linear regression ##################################

import numpy as np
import matplotlib.pyplot as plt

def estimate_coef(x, y):
    # number of observations/points
    n = np.size(x)

    # means of the x and y vectors
    m_x = np.mean(x)
    m_y = np.mean(y)

    # cross-deviation and deviation about x
    SS_xy = np.sum(y * x) - n * m_y * m_x
    SS_xx = np.sum(x * x) - n * m_x * m_x

    # regression coefficients
    b_1 = SS_xy / SS_xx
    b_0 = m_y - b_1 * m_x
    print("b_0", b_0)
    print("b_1", b_1)
    return (b_0, b_1)

def plot_regression_line(x, y, b):
    # plot the actual points as a scatter plot
    plt.scatter(x, y, color="m", marker="o", s=30)

    # predicted response vector
    y_pred = b[0] + b[1] * x

    # plot the regression line
    plt.plot(x, y_pred, color="g")

    # axis labels
    plt.xlabel('x')
    plt.ylabel('y')
    plt.show()

# observations / data
x = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
y = np.array([1, 3, 2, 5, 7, 8, 8, 9, 10, 12])

# estimate the coefficients and plot the fitted line
b = estimate_coef(x, y)
plot_regression_line(x, y, b)

# Output plot: https://ibb.co/xmMGCbm

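# Cross-check (a sketch, not part of the original paste): np.polyfit with degree 1
# fits the same least-squares line, so it should return the same coefficients.
slope_chk, intercept_chk = np.polyfit(x, y, 1)
print("polyfit b_1:", slope_chk, "b_0:", intercept_chk)
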
# Predict the speed of a 10-year-old car with scipy.stats.linregress
import matplotlib.pyplot as plt
from scipy import stats

x = [5, 7, 8, 7, 2, 17, 2, 9, 4, 11, 12, 9, 6]
y = [99, 86, 87, 88, 111, 86, 103, 87, 94, 78, 77, 85, 86]

slope, intercept, r, p, std_err = stats.linregress(x, y)

def myfunc(x):
    return slope * x + intercept

mymodel = list(map(myfunc, x))

plt.scatter(x, y)
plt.plot(x, mymodel)
plt.show()

speed = myfunc(10)
print("Predicted speed of a 10-year-old car:", speed)
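
# The r value returned by linregress (an addition, not in the original paste) shows how
# well the line fits: values near -1 or 1 indicate a strong linear relationship.
print("r:", r)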

# Example of a bad fit: x and y are essentially uncorrelated
import matplotlib.pyplot as plt
from scipy import stats

x = [89, 43, 36, 36, 95, 10, 66, 34, 38, 20, 26, 29, 48,
     64, 6, 5, 36, 66, 72, 40]
y = [21, 46, 3, 35, 67, 95, 53, 72, 58, 10,
     26, 34, 90, 33, 38, 20, 56, 2, 47, 15]

slope, intercept, r, p, std_err = stats.linregress(x, y)

def myfunc(x):
    return slope * x + intercept

mymodel = list(map(myfunc, x))

plt.scatter(x, y)
plt.plot(x, mymodel)
plt.show()

# r close to 0 confirms that a straight line models this data poorly
print(r)
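
# r squared (an addition, not in the original paste) expresses the same idea as the
# share of variance in y explained by the line; here it is close to zero.
print("r squared:", r ** 2)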

######################## Skewness ###########################

import numpy as np
import pandas as pd
import seaborn as sns

# Example dataset
diamonds = sns.load_dataset("diamonds")
diamond_prices = diamonds["price"]

# Pearson's second skewness coefficient (median skewness)
mean_price = diamond_prices.mean()
median_price = diamond_prices.median()
std = diamond_prices.std()
skewness = (3 * (mean_price - median_price)) / std
print(f"Pearson's second skewness score of the diamond prices distribution is {skewness:.5f}")
# Output: Pearson's second skewness score of the diamond prices distribution is 1.15189

# Adjusted Fisher-Pearson (moment-based) skewness, computed manually
def moment_based_skew(distribution):
    n = len(distribution)
    mean = np.mean(distribution)
    std = np.std(distribution, ddof=1)  # sample standard deviation, as the adjusted formula requires

    # Split the formula into two parts
    first_part = n / ((n - 1) * (n - 2))
    second_part = np.sum(((distribution - mean) / std) ** 3)

    skewness = first_part * second_part
    return skewness

skew_manual = moment_based_skew(diamond_prices)
print("The moment-based skewness score of the diamond prices distribution is", skew_manual)

# Using libraries
# Pandas version (bias-adjusted)
print("The moment-based skewness score of the diamond prices distribution is",
      diamond_prices.skew())
# SciPy version (biased by default)
from scipy.stats import skew
print("The moment-based skewness score of the diamond prices distribution is",
      skew(diamond_prices))
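
# Note (an addition, not in the original paste): scipy's skew() is biased by default;
# bias=False applies the same correction pandas uses, so the two results then agree.
print("Adjusted SciPy skewness:", skew(diamond_prices, bias=False))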

# Visualization
import matplotlib.pyplot as plt
sns.kdeplot(diamond_prices)
plt.title("Plot of diamond prices")
plt.xlabel("Price ($)")
plt.show()


############################## Simplex method ###############################
# Assignment: Write a Python program to solve a Linear Programming Problem using
# the Simplex Method.
import numpy as np

def simplex_method(A, b, c):
    # Tableau simplex for: maximise -(c @ x) subject to A @ x <= b, x >= 0
    # (c holds the negated objective coefficients, as in the example below).
    m, n = A.shape

    # Initial tableau: [A | I | b] with the objective row appended at the bottom
    tableau = np.hstack([A, np.eye(m), b.reshape(-1, 1)]).astype(float)
    tableau = np.vstack([tableau, np.concatenate([c, np.zeros(m + 1)])])

    while True:
        # Optimal once no negative reduced costs remain in the objective row
        if np.all(tableau[-1, :-1] >= 0):
            break

        # Entering variable: column with the most negative reduced cost
        pivot_col = np.argmin(tableau[-1, :-1])

        # Leaving variable: minimum ratio test over strictly positive column entries
        col = tableau[:-1, pivot_col]
        rhs = tableau[:-1, -1]
        ratios = np.full(m, np.inf)
        positive = col > 1e-12
        ratios[positive] = rhs[positive] / col[positive]
        if np.all(np.isinf(ratios)):
            raise ValueError("Problem is unbounded")
        pivot_row = np.argmin(ratios)

        # Pivot: normalise the pivot row, then eliminate the column from the other rows
        tableau[pivot_row, :] /= tableau[pivot_row, pivot_col]
        for i in range(m + 1):
            if i != pivot_row:
                tableau[i, :] -= tableau[i, pivot_col] * tableau[pivot_row, :]

    # Read off the decision variables: a variable is basic if its column is a unit vector
    solution = np.zeros(n)
    for j in range(n):
        col = tableau[:-1, j]
        if np.isclose(col.max(), 1.0) and np.isclose(np.abs(col).sum(), 1.0):
            solution[j] = tableau[np.argmax(col), -1]

    return tableau[-1, -1], solution

# Maximise 3x + 5y (objective row seeded with c = [-3, -5])
# subject to 2x + y <= 4 and x + 2y <= 3
A = np.array([[2, 1], [1, 2]])
b = np.array([4, 3])
c = np.array([-3, -5])

optimal_value, optimal_solution = simplex_method(A, b, c)

print("Optimal value:", optimal_value)
print("Optimal solution:", optimal_solution)


# Linear regression with scikit-learn
import numpy as np
from sklearn.linear_model import LinearRegression

years = np.array([[1], [2], [3], [4], [5]])
speeds = np.array([30, 45, 45, 55, 65])

model = LinearRegression()
model.fit(years, speeds)

x = 15
predicted_speed = model.predict([[x]])

print(f"Predicted speed after {x} years:", predicted_speed[0], "km/h")
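
# The fitted line can also be inspected directly (an addition, not in the original
# paste): coef_ holds the slope for the single feature, intercept_ the offset.
print("Slope:", model.coef_[0], "Intercept:", model.intercept_)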