### Using TensorFlow for regression
import tensorflow as tf
import numpy as np
import pandas as pd
# Define the intercept and slope
intercept = tf.Variable(0.1, dtype=tf.float32)  # Initial guess; it will be the target of optimization
slope = tf.Variable(0.1, dtype=tf.float32) # Initial guess; it will be the target of optimization
opt = tf.keras.optimizers.Adam(learning_rate=0.01) # Optimizer algorithm that reduces loss function
# Define a linear regression model
def linear_regression(intercept, slope, features):
    return intercept + features*slope # y' = m*X + c
# Compute the predicted values and loss
def loss_function(intercept, slope, targets, features): 
    predictions = linear_regression(intercept, slope, features)  # y' = m*X + c
    return tf.keras.losses.mse(targets, predictions) # the error between y and y' 
for j in range(1000):       # Do for 1000 iterations (Can be considered epochs)
    for batch in pd.read_csv('dataset.csv', chunksize=100): # Load the data in batches
        y = np.array(batch['target'], np.float32) 
        X = np.array(batch['feature'], np.float32)
        opt.minimize(lambda: loss_function(intercept, slope, y, X), var_list=[intercept, slope]) # Run optimization algorithm to minimize loss function
        print(loss_function(intercept, slope, y, X).numpy()) # Verbose output that prints the reduced error after each update
print(intercept.numpy(), slope.numpy())  # Print the trained parameters

### Note: the opt.minimize() call can also be broken out explicitly as
# with tf.GradientTape() as tape:
#     loss = loss_function(intercept, slope, y, X)
# gradients = tape.gradient(loss, [intercept, slope])
# opt.apply_gradients(zip(gradients, [intercept, slope]))
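### Note: a self-contained sketch of one explicit GradientTape update step; the X and y values below are made-up toy data, not from the dataset above:
import numpy as np
import tensorflow as tf

intercept = tf.Variable(0.1, dtype=tf.float32)
slope = tf.Variable(0.1, dtype=tf.float32)
opt = tf.keras.optimizers.Adam(learning_rate=0.01)

X = np.array([1.0, 2.0, 3.0], dtype=np.float32)  # toy feature batch (illustrative values)
y = np.array([2.1, 4.2, 5.9], dtype=np.float32)  # toy target batch (illustrative values)

with tf.GradientTape() as tape:
    predictions = intercept + slope * X                  # forward pass: y' = m*X + c
    loss = tf.keras.losses.mse(y, predictions)           # error between y and y'
gradients = tape.gradient(loss, [intercept, slope])      # d(loss)/d(intercept), d(loss)/d(slope)
opt.apply_gradients(zip(gradients, [intercept, slope]))  # apply one optimizer update
print(intercept.numpy(), slope.numpy())                  # parameters after one step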

### Using TensorFlow for a simple deep learning example
features = tf.Variable(tf.random.normal([50, 10])) # A dataset with 50 rows and 10 columns
weights = tf.Variable(tf.random.normal([10, 30])) # Weights must be shaped for matrix multiplication (input columns x output neurons): 10 input neurons feed your chosen 30 hidden-layer neurons
bias = tf.Variable(tf.zeros([30]))  # The bias needs one entry per column of the features-weights product (here 30)
opt = tf.keras.optimizers.RMSprop(learning_rate=0.01, momentum=0.9) # Optimizer algorithm that reduces loss function

def model(bias, weights, features):
    product = tf.matmul(features, weights)
    return tf.keras.activations.sigmoid(product+bias) # Forward propagation with activation function that returns y'

def loss_function(bias, weights, targets, features):
    predictions = model(bias, weights, features) # Calculating y'
    return tf.keras.losses.binary_crossentropy(targets, predictions) # Calculating error between y and y'

for j in range(1000):       # Do for 1000 iterations (Can be considered epochs)
    for batch in pd.read_csv('dataset.csv', chunksize=100): # Load the data in batches
        y = np.array(batch['target'], np.float32) 
        X = np.array(batch.drop('target', axis=1), np.float32) # All feature columns (assumes the remaining 10 columns match the first dimension of weights)
        opt.minimize(lambda: loss_function(bias, weights, y, X), var_list=[bias, weights]) # Run optimization algorithm to minimize loss function
        print(loss_function(bias, weights, y, X).numpy()) # Verbose output that prints the reduced error after each update
print(bias.numpy(), weights.numpy())  # Print the trained parameters
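### Note: a minimal sketch of an equivalent binary classifier using the high-level Keras API; it assumes 10 feature columns and a 0/1 target, and uses randomly generated placeholder data:
X = np.random.normal(size=(50, 10)).astype(np.float32)      # placeholder: 50 rows, 10 feature columns
y = np.random.randint(0, 2, size=(50,)).astype(np.float32)  # placeholder: binary targets

keras_model = tf.keras.Sequential([
    tf.keras.layers.Dense(30, activation='sigmoid', input_shape=(10,)),  # hidden layer: 10 inputs -> 30 neurons
    tf.keras.layers.Dense(1, activation='sigmoid')                       # output layer: 30 inputs -> 1 neuron
])
keras_model.compile(optimizer=tf.keras.optimizers.RMSprop(learning_rate=0.01, momentum=0.9),
                    loss='binary_crossentropy')
keras_model.fit(X, y, batch_size=100, epochs=10, verbose=0)  # Keras handles the GradientTape loop internally
print(keras_model.get_weights()[0].shape)  # trained weight matrix of the hidden layer: (10, 30)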

### Example of creating multiple Dense layers sequentially
features = np.array([[2., 2., 43.]], dtype=np.float32) # The inputs; these should be constants (equivalently: tf.constant(features))
bias1 = tf.Variable(1.0) # Initialize bias1; it needs one entry per neuron in this layer (or a scalar that broadcasts), e.g. tf.Variable(tf.ones([n_neurons]))
weights1 = tf.ones((3, 2)) # Initialize weights1 as a 3x2 tensor of ones (3 incoming input columns, 2 hidden-layer neurons)
product1 = tf.matmul(features, weights1) # Perform matrix multiplication of features and weights1
dense1 = tf.keras.activations.sigmoid(product1 + bias1) # Apply sigmoid activation function to product1 + bias1
print("\n dense1's output shape: {}".format(dense1.shape)) # Print shape of dense1
bias2 = tf.Variable(1.0)  # Initialize bias2
weights2 = tf.ones((2, 1)) # Initialize weights2 as a 2x1 tensor of ones (2 neurons in the previous layer, 1 neuron in the final layer)
product2 = tf.matmul(dense1,weights2)  # Perform matrix multiplication of dense1 and weights2
prediction = tf.keras.activations.sigmoid(product2 + bias2) # Apply activation to product2 + bias2 and print the prediction
print('\n prediction: {}'.format(prediction.numpy()[0,0]))
print('\n actual: 1')
# Compute the error for this example (with a batch of input rows you could slice the first five instead)
actual = df["target"].values  # assumes a DataFrame df holding the true labels
error = actual[0] - prediction.numpy()[0, 0]
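### Note: a sketch of the same two-layer forward pass built from Keras Dense layers; the weights here are randomly initialized rather than ones, so the numbers will differ:
layer1 = tf.keras.layers.Dense(2, activation='sigmoid')  # hidden layer: 3 inputs -> 2 neurons
layer2 = tf.keras.layers.Dense(1, activation='sigmoid')  # output layer: 2 inputs -> 1 neuron
keras_prediction = layer2(layer1(features))              # forward propagation through both layers
print('\n keras prediction: {}'.format(keras_prediction.numpy()[0, 0]))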

## NOTE: Also see CUSTOM TENSORFLOW MODEL and TENSORFLOW BASICS (a tensor is a multidimensional array)
