Implementing an SVM with TensorFlow

This is a fairly basic SVM implementation; multi-class classification and a Gaussian (RBF) kernel may be added later. It is offered for reference.
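
To make the objective explicit before the code: the class minimizes the mean hinge loss plus an L2 penalty on the weights scaled by C, using mini-batch gradient descent, with labels encoded as -1/+1. A minimal NumPy sketch of that objective (the function name and argument shapes here are illustrative only, not part of the class below):

import numpy as np

def soft_margin_loss(W, b, X, y, C):
    # Hinge loss: mean(max(0, 1 - y * (X @ W + b))), with y in {-1, +1} and shape (n, 1)
    margins = np.maximum(0.0, 1.0 - y * (X @ W + b))
    # L2 penalty on the weight vector, scaled by C, matching the TensorFlow graph below
    return margins.mean() + C * np.sum(W ** 2)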


      Talk is cheap, show me the code

import tensorflow as tf
from sklearn.base import BaseEstimator, ClassifierMixin
import numpy as np

class TFSVM(BaseEstimator, ClassifierMixin):

    def __init__(self,
                 C=1, kernel='linear',
                 learning_rate=0.01,
                 training_epoch=1000,
                 display_step=50,
                 batch_size=50,
                 random_state=42):
        # Hyperparameters; kernel is accepted for API compatibility,
        # but only the linear case is implemented below
        self.svmC = C
        self.kernel = kernel
        self.learning_rate = learning_rate
        self.training_epoch = training_epoch
        self.display_step = display_step
        self.random_state = random_state
        self.batch_size = batch_size

    def reset_seed(self):
        # Reset the random seeds for reproducibility
        tf.set_random_seed(self.random_state)
        np.random.seed(self.random_state)

    def random_batch(self, X, y):
        # Draw a random subset for mini-batch gradient descent
        indices = np.random.randint(0, X.shape[0], self.batch_size)
        X_batch = X[indices]
        y_batch = y[indices]
        return X_batch, y_batch
      
    def _build_graph(self, X_train, y_train):
        # Build the computation graph
        self.reset_seed()

        n_instances, n_inputs = X_train.shape

        X = tf.placeholder(tf.float32, [None, n_inputs], name='X')
        y = tf.placeholder(tf.float32, [None, 1], name='y')

        with tf.name_scope('trainable_variables'):
            # The two variables that define the decision boundary
            W = tf.Variable(tf.truncated_normal(shape=[n_inputs, 1], stddev=0.1), name='weights')
            b = tf.Variable(tf.truncated_normal([1]), name='bias')

        with tf.name_scope('training'):
            # Core of the algorithm: hinge loss plus a C-scaled L2 penalty
            y_raw = tf.add(tf.matmul(X, W), b)
            l2_norm = tf.reduce_sum(tf.square(W))
            # max(0, 1 - y * f(x)); the scalar zero broadcasts to any batch size,
            # so the same node also works when evaluating the loss on the validation set
            hinge_loss = tf.reduce_mean(tf.maximum(0., tf.subtract(1., tf.multiply(y_raw, y))))
            svm_loss = tf.add(hinge_loss, tf.multiply(tf.constant(self.svmC, dtype=tf.float32), l2_norm))
            training_op = tf.train.AdamOptimizer(learning_rate=self.learning_rate).minimize(svm_loss)

        with tf.name_scope('eval'):
            # Accuracy and class prediction
            prediction_class = tf.sign(y_raw)
            correct_prediction = tf.equal(y, prediction_class)
            accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))

        init = tf.global_variables_initializer()

        self._X = X; self._y = y
        self._loss = svm_loss; self._training_op = training_op
        self._accuracy = accuracy; self.init = init
        self._prediction_class = prediction_class
        self._W = W; self._b = b
      
    def _get_model_params(self):
        # Fetch the current model parameters so the best ones can be kept
        with self._graph.as_default():
            gvars = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES)
        return {gvar.op.name: value for gvar, value in zip(gvars, self._session.run(gvars))}

    def _restore_model_params(self, model_params):
        # Restore previously saved model parameters
        gvar_names = list(model_params.keys())
        assign_ops = {gvar_name: self._graph.get_operation_by_name(gvar_name + '/Assign') for gvar_name in gvar_names}
        init_values = {gvar_name: assign_op.inputs[1] for gvar_name, assign_op in assign_ops.items()}
        feed_dict = {init_values[gvar_name]: model_params[gvar_name] for gvar_name in gvar_names}
        self._session.run(assign_ops, feed_dict=feed_dict)
      
    def fit(self, X, y, X_val=None, y_val=None):
        # Note: a validation set must be provided; it drives early stopping
        n_batches = X.shape[0] // self.batch_size

        self._graph = tf.Graph()
        with self._graph.as_default():
            self._build_graph(X, y)

        best_loss = np.inf
        best_accuracy = 0
        best_params = None
        checks_without_progress = 0
        max_checks_without_progress = 20

        self._session = tf.Session(graph=self._graph)

        with self._session.as_default() as sess:
            self.init.run()

            for epoch in range(self.training_epoch):
                for batch_index in range(n_batches):
                    X_batch, y_batch = self.random_batch(X, y)
                    sess.run(self._training_op, feed_dict={self._X: X_batch, self._y: y_batch})
                loss_val, accuracy_val = sess.run([self._loss, self._accuracy], feed_dict={self._X: X_val, self._y: y_val})
                accuracy_train = self._accuracy.eval(feed_dict={self._X: X_batch, self._y: y_batch})

                if loss_val < best_loss:
                    best_loss = loss_val
                    best_params = self._get_model_params()
                    checks_without_progress = 0
                else:
                    checks_without_progress += 1
                    if checks_without_progress > max_checks_without_progress:
                        break

                if accuracy_val > best_accuracy:
                    best_accuracy = accuracy_val
                    # best_params = self._get_model_params()

                if epoch % self.display_step == 0:
                    print('Epoch: {}\tValidation loss: {:.6f}\tValidation Accuracy: {:.4f}\tTraining Accuracy: {:.4f}'
                          .format(epoch, loss_val, accuracy_val, accuracy_train))
            print('Best Accuracy: {:.4f}\tBest Loss: {:.6f}'.format(best_accuracy, best_loss))
            if best_params:
                self._restore_model_params(best_params)
                # Keep the best weight vector and bias (coefficients = weights, intercept = bias)
                self._coef = best_params['trainable_variables/weights']
                self._intercept = best_params['trainable_variables/bias']
            return self

    def predict(self, X):
        with self._session.as_default() as sess:
            return self._prediction_class.eval(feed_dict={self._X: X})

    def coef(self):
        # Learned weight vector of the decision function
        return self._coef

    def intercept(self):
        # Learned bias term of the decision function
        return self._intercept
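
Since TFSVM follows the scikit-learn estimator interface, it can be driven like any other classifier. Below is a minimal usage sketch, assuming TensorFlow 1.x and scikit-learn are installed; the synthetic data set, split, and hyperparameter values are made up for illustration. Labels must be encoded as -1/+1 and reshaped to a column vector, because the graph compares tf.sign of the raw output against y directly:

import numpy as np
from sklearn.datasets import make_blobs
from sklearn.model_selection import train_test_split

# Synthetic two-class data for illustration
X, y = make_blobs(n_samples=1000, centers=2, random_state=42)
X = X.astype(np.float32)
y = np.where(y == 0, -1.0, 1.0).reshape(-1, 1).astype(np.float32)  # labels in {-1, +1}, shape (n, 1)

X_train, X_val, y_train, y_val = train_test_split(X, y, test_size=0.2, random_state=42)

clf = TFSVM(C=1, learning_rate=0.01, training_epoch=200, display_step=50)
clf.fit(X_train, y_train, X_val, y_val)  # the validation set is required: it drives early stopping
y_pred = clf.predict(X_val)
print('Validation accuracy: {:.4f}'.format((y_pred == y_val).mean()))

Early stopping is controlled by the validation loss: once it has failed to improve for more than 20 consecutive epochs, training stops and the best parameters seen so far are restored.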
      
