{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import sys\n",
    "sys.path.append('./src')\n",
    "import numpy as np\n",
    "import torch\n",
    "torch.manual_seed(0)\n",
    "import torch.nn as nn\n",
    "from torch.autograd import Variable,grad\n",
    "from src.model import FNN\n",
    "from src.util import *\n",
    "from src.train import *"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "def PDE(x,y,z,t,net):\n",
    "    \"\"\"Residual of the transient heat equation rho*Cp*T_t = k*(T_xx + T_yy + T_zz).\n",
    "\n",
    "    x, y, z, t: coordinate tensors built with requires_grad=True.\n",
    "    net: network mapping concatenated (x,y,z,t) to temperature T.\n",
    "    Returns the pointwise PDE residual f (zero where the equation holds).\n",
    "    Relies on the module-level material constants rho, Cp, k.\n",
    "    \"\"\"\n",
    "    T = net(torch.cat([x,y,z,t],axis=-1))\n",
    "\n",
    "    def partial(u,var):\n",
    "        # first derivative of u w.r.t. var; create_graph=True keeps the\n",
    "        # graph so second derivatives and the loss backward pass work\n",
    "        return grad(u,var,create_graph=True,grad_outputs=torch.ones_like(u))[0]\n",
    "\n",
    "    T_t = partial(T,t)\n",
    "    T_xx = partial(partial(T,x),x)\n",
    "    T_yy = partial(partial(T,y),y)\n",
    "    T_zz = partial(partial(T,z),z)\n",
    "\n",
    "    return rho*Cp*T_t - k*(T_xx+T_yy+T_zz)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "def generate_points(p=None,f=None):\n",
    "    \"\"\"Sample boundary, initial and interior collocation points for training.\n",
    "\n",
    "    p: optional list to extend with point tensors (a new list if None).\n",
    "    f: optional list to extend with the matching condition flags (new if None).\n",
    "    Returns (p, f), where each entry of p is a float tensor on `device` with\n",
    "    requires_grad=True and f labels it as a BC face, the IC, or the domain.\n",
    "    Relies on module-level constants (x_min, x_max, x0, y0, r, v, t_end,\n",
    "    T_ref, device) and sampling_uniform from src.util.\n",
    "    \"\"\"\n",
    "    # None defaults fix the mutable-default-argument bug: with p=[] as the\n",
    "    # default, repeated no-arg calls would keep extending one shared list.\n",
    "    if p is None:\n",
    "        p = []\n",
    "    if f is None:\n",
    "        f = []\n",
    "\n",
    "    # time samples; start slightly after 0 so IC and BC points don't overlap\n",
    "    t = np.linspace(x_min[3]+0.01,x_max[3],61)\n",
    "\n",
    "    # boundary points on the six faces of the box\n",
    "    bound_x_neg,_ = sampling_uniform(1.,x_min,x_max,'-x',t)\n",
    "    bound_x_pos,_ = sampling_uniform(1.,x_min,x_max,'+x',t)\n",
    "\n",
    "    bound_y_neg,_ = sampling_uniform(1.,x_min,x_max,'-y',t)\n",
    "    bound_y_pos,_ = sampling_uniform(1.,x_min,x_max,'+y',t)\n",
    "\n",
    "    bound_z_neg,_ = sampling_uniform(1.,x_min,x_max,'-z',t)\n",
    "    bound_z_pos,_ = sampling_uniform(1.,x_min,x_max,'+z',t)\n",
    "\n",
    "    bound_z_pos_more = [] # denser points for the surface laser flux\n",
    "\n",
    "    # while the laser is on, refine the top face in a 2r window that\n",
    "    # follows the moving beam center (x0 + ti*v, y0)\n",
    "    for ti in t:\n",
    "        if ti<=t_end:\n",
    "            zi,_ = sampling_uniform(.25,\n",
    "                        [max(x0+ti*v-2*r,x_min[0]),max(x_min[1],y0-2*r),x_min[2]],\n",
    "                        [min(x0+ti*v+2*r,x_max[0]),min(x_max[1],y0+2*r),x_max[2]],\n",
    "                        '+z',[ti])\n",
    "            bound_z_pos_more.append(zi)\n",
    "\n",
    "    bound_z_pos_more = np.vstack(bound_z_pos_more)\n",
    "    bound_z_pos = np.vstack((bound_z_pos,bound_z_pos_more))\n",
    "\n",
    "    ### domain points: coarse in the bulk, finer toward the heated top\n",
    "    domain_pts1,_ = sampling_uniform(2.,\n",
    "                                     [x_min[0],x_min[1],x_min[2]],\n",
    "                                     [x_max[0],x_max[1],x_max[2]-3.],'domain',t)\n",
    "\n",
    "    domain_pts2,_ = sampling_uniform(1.,\n",
    "                                     [x_min[0],x_min[1],x_max[2]-3.+.5],\n",
    "                                     [x_max[0],x_max[1],x_max[2]-1.],'domain',t)\n",
    "\n",
    "    domain_pts3 = []\n",
    "    for ti in t:\n",
    "        di,_ = sampling_uniform(.5,\n",
    "                                [x_min[0],x_min[1],x_max[2]-1.+.25,],\n",
    "                                [x_max[0],x_max[1],x_max[2]],'domain',[ti])\n",
    "        domain_pts3.append(di)\n",
    "    domain_pts3 = np.vstack(domain_pts3)\n",
    "    domain_pts = np.vstack((domain_pts1,domain_pts2,domain_pts3))\n",
    "\n",
    "    # initial points at t = 0\n",
    "    init_pts1,_ = sampling_uniform(2.,[x_min[0],x_min[1],x_min[2]],\n",
    "                                   [x_max[0],x_max[1],x_max[2]],'domain',[0],e=0)\n",
    "    # more points near the toolpath origin\n",
    "    init_pts2,_ = sampling_uniform(.5,[x0-2,y0-2,x_max[2]-2],\n",
    "                                   [x0+2,y0+2,x_max[2]],'domain',[0])\n",
    "\n",
    "    init_pts = np.vstack((init_pts1,init_pts2))\n",
    "\n",
    "    # to torch tensors (with grad, on device), paired with condition flags\n",
    "    p.extend([torch.tensor(bound_x_neg,requires_grad=True,dtype=torch.float).to(device),\n",
    "              torch.tensor(bound_x_pos,requires_grad=True,dtype=torch.float).to(device),\n",
    "              torch.tensor(bound_y_neg,requires_grad=True,dtype=torch.float).to(device),\n",
    "              torch.tensor(bound_y_pos,requires_grad=True,dtype=torch.float).to(device),\n",
    "              torch.tensor(bound_z_neg,requires_grad=True,dtype=torch.float).to(device),\n",
    "              torch.tensor(bound_z_pos,requires_grad=True,dtype=torch.float).to(device),\n",
    "              torch.tensor(init_pts,requires_grad=True,dtype=torch.float).to(device),\n",
    "              torch.tensor(domain_pts,requires_grad=True,dtype=torch.float).to(device)])\n",
    "    f.extend([['BC','-x'],['BC','+x'],['BC','-y'],['BC','+y'],['BC','-z'],['BC','+z'],['IC',T_ref],['domain']])\n",
    "\n",
    "    return p,f"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "def BC(x,y,z,t,net,loc):\n",
    "    \"\"\"Boundary-condition residual on the face identified by loc.\n",
    "\n",
    "    loc is one of '-x','+x','-y','+y','-z','+z'. The lateral and top faces\n",
    "    balance conduction against convective and radiative losses; '+z' adds\n",
    "    the moving Gaussian laser flux q; '-z' returns T_t as the residual\n",
    "    (the trainer drives it to zero, keeping the bottom face steady in time).\n",
    "    Relies on module-level constants k,h,T_ref,Rboltz,emiss,P,eta,r,x0,y0,v,t_end.\n",
    "    \"\"\"\n",
    "    X = torch.cat([x,y,z,t],axis=-1)\n",
    "    T = net(X)\n",
    "\n",
    "    def partial(var):\n",
    "        # dT/dvar, graph retained for the loss backward pass\n",
    "        return grad(T,var,create_graph=True,grad_outputs=torch.ones_like(T))[0]\n",
    "\n",
    "    if loc == '-x':\n",
    "        T_x = partial(x)\n",
    "        return k*T_x - h*(T-T_ref) - Rboltz*emiss*(T**4-T_ref**4)\n",
    "    elif loc == '+x':\n",
    "        T_x = partial(x)\n",
    "        return -k*T_x - h*(T-T_ref) - Rboltz*emiss*(T**4-T_ref**4)\n",
    "    elif loc == '-y':\n",
    "        T_y = partial(y)\n",
    "        return k*T_y - h*(T-T_ref) - Rboltz*emiss*(T**4-T_ref**4)\n",
    "    elif loc == '+y':\n",
    "        T_y = partial(y)\n",
    "        return -k*T_y - h*(T-T_ref) - Rboltz*emiss*(T**4-T_ref**4)\n",
    "    elif loc == '-z':\n",
    "        return partial(t)\n",
    "    elif loc == '+z':\n",
    "        T_z = partial(z)\n",
    "        # Gaussian beam centered at (x0+v*t, y0), active only for 0 < t <= t_end\n",
    "        q = 2*P*eta/3.14159265/r**2*torch.exp(-2*(torch.square(x-x0-v*t)+torch.square(y-y0))/r**2)*(t<=t_end)*(t>0)\n",
    "        return -k*T_z - h*(T-T_ref) - Rboltz*emiss*(T**4-T_ref**4) + q"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "def output_transform(X):\n",
    "    # Map raw network output to a physical temperature: softplus keeps the\n",
    "    # offset above ambient T_ref non-negative, scaled by T_range.\n",
    "    # Use the functional form instead of building a fresh nn.Softplus module\n",
    "    # on every forward call (numerically identical; the module wraps this).\n",
    "    return T_range*nn.functional.softplus(X) + T_ref\n",
    "\n",
    "\n",
    "def input_transform(X):\n",
    "    \"\"\"Linearly rescale inputs from [X_min, X_max] to [-1, 1] for the network.\"\"\"\n",
    "    return 2.*(X-X_min)/(X_max-X_min) - 1."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "It: 0, Loss: 7.908e+03, BC: 2.314e+04, IC: 5.829e+06, PDE: 1.652e+00, Test: 4.669e+06, Time: 0.32\n",
      "It: 100, Loss: 5.986e+02, BC: 1.389e+03, IC: 4.062e+06, PDE: 2.149e-01, Test: 4.323e+06, Time: 23.99\n",
      "It: 200, Loss: 3.412e+02, BC: 6.527e+02, IC: 3.707e+06, PDE: 8.680e-02, Test: 4.173e+06, Time: 24.01\n",
      "It: 300, Loss: 2.861e+02, BC: 4.986e+02, IC: 3.596e+06, PDE: 6.264e-02, Test: 4.017e+06, Time: 24.08\n",
      "It: 400, Loss: 2.440e+02, BC: 3.817e+02, IC: 3.502e+06, PDE: 4.660e-02, Test: 3.879e+06, Time: 24.33\n",
      "It: 500, Loss: 2.120e+02, BC: 2.946e+02, IC: 3.415e+06, PDE: 3.531e-02, Test: 3.758e+06, Time: 24.38\n",
      "It: 600, Loss: 1.879e+02, BC: 2.303e+02, IC: 3.333e+06, PDE: 2.744e-02, Test: 3.650e+06, Time: 24.40\n",
      "It: 700, Loss: 1.695e+02, BC: 1.829e+02, IC: 3.256e+06, PDE: 2.197e-02, Test: 3.553e+06, Time: 24.46\n",
      "It: 800, Loss: 1.553e+02, BC: 1.478e+02, IC: 3.182e+06, PDE: 1.819e-02, Test: 3.466e+06, Time: 24.45\n",
      "It: 900, Loss: 1.443e+02, BC: 1.218e+02, IC: 3.109e+06, PDE: 1.558e-02, Test: 3.385e+06, Time: 24.44\n",
      "It: 1000, Loss: 1.354e+02, BC: 1.025e+02, IC: 3.038e+06, PDE: 1.381e-02, Test: 3.310e+06, Time: 24.40\n",
      "It: 1100, Loss: 1.282e+02, BC: 8.803e+01, IC: 2.967e+06, PDE: 1.263e-02, Test: 3.239e+06, Time: 24.34\n",
      "It: 1200, Loss: 1.223e+02, BC: 7.727e+01, IC: 2.896e+06, PDE: 1.188e-02, Test: 3.171e+06, Time: 24.36\n",
      "It: 1300, Loss: 1.173e+02, BC: 6.924e+01, IC: 2.826e+06, PDE: 1.145e-02, Test: 3.106e+06, Time: 24.19\n",
      "It: 1400, Loss: 1.130e+02, BC: 6.327e+01, IC: 2.756e+06, PDE: 1.127e-02, Test: 3.042e+06, Time: 24.39\n",
      "It: 1500, Loss: 1.092e+02, BC: 5.886e+01, IC: 2.686e+06, PDE: 1.129e-02, Test: 2.979e+06, Time: 24.39\n",
      "It: 1600, Loss: 1.058e+02, BC: 5.561e+01, IC: 2.616e+06, PDE: 1.146e-02, Test: 2.918e+06, Time: 24.52\n",
      "It: 1700, Loss: 1.027e+02, BC: 5.325e+01, IC: 2.547e+06, PDE: 1.176e-02, Test: 2.856e+06, Time: 24.32\n",
      "It: 1800, Loss: 9.981e+01, BC: 5.156e+01, IC: 2.479e+06, PDE: 1.217e-02, Test: 2.795e+06, Time: 24.29\n",
      "It: 1900, Loss: 9.714e+01, BC: 5.035e+01, IC: 2.411e+06, PDE: 1.266e-02, Test: 2.735e+06, Time: 24.03\n",
      "It: 2000, Loss: 9.461e+01, BC: 4.951e+01, IC: 2.343e+06, PDE: 1.322e-02, Test: 2.674e+06, Time: 24.02\n",
      "It: 2100, Loss: 9.219e+01, BC: 4.893e+01, IC: 2.276e+06, PDE: 1.385e-02, Test: 2.613e+06, Time: 24.02\n",
      "It: 2200, Loss: 8.985e+01, BC: 4.854e+01, IC: 2.210e+06, PDE: 1.452e-02, Test: 2.551e+06, Time: 24.02\n",
      "It: 2300, Loss: 8.759e+01, BC: 4.829e+01, IC: 2.145e+06, PDE: 1.522e-02, Test: 2.490e+06, Time: 24.02\n",
      "It: 2400, Loss: 8.538e+01, BC: 4.812e+01, IC: 2.080e+06, PDE: 1.595e-02, Test: 2.428e+06, Time: 24.01\n",
      "It: 2500, Loss: 8.321e+01, BC: 4.800e+01, IC: 2.016e+06, PDE: 1.670e-02, Test: 2.366e+06, Time: 24.14\n",
      "It: 2600, Loss: 8.108e+01, BC: 4.792e+01, IC: 1.953e+06, PDE: 1.745e-02, Test: 2.304e+06, Time: 24.34\n",
      "It: 2700, Loss: 7.899e+01, BC: 4.786e+01, IC: 1.891e+06, PDE: 1.821e-02, Test: 2.242e+06, Time: 24.27\n",
      "It: 2800, Loss: 7.693e+01, BC: 4.781e+01, IC: 1.830e+06, PDE: 1.896e-02, Test: 2.180e+06, Time: 24.37\n",
      "It: 2900, Loss: 7.490e+01, BC: 4.776e+01, IC: 1.769e+06, PDE: 1.970e-02, Test: 2.118e+06, Time: 24.35\n",
      "It: 3000, Loss: 7.289e+01, BC: 4.770e+01, IC: 1.709e+06, PDE: 2.043e-02, Test: 2.055e+06, Time: 24.04\n",
      "It: 3100, Loss: 7.091e+01, BC: 4.764e+01, IC: 1.651e+06, PDE: 2.113e-02, Test: 1.993e+06, Time: 24.00\n",
      "It: 3200, Loss: 6.896e+01, BC: 4.757e+01, IC: 1.593e+06, PDE: 2.180e-02, Test: 1.931e+06, Time: 24.62\n",
      "It: 3300, Loss: 6.703e+01, BC: 4.749e+01, IC: 1.536e+06, PDE: 2.244e-02, Test: 1.869e+06, Time: 24.77\n",
      "It: 3400, Loss: 6.514e+01, BC: 4.741e+01, IC: 1.480e+06, PDE: 2.305e-02, Test: 1.808e+06, Time: 24.80\n",
      "It: 3500, Loss: 6.327e+01, BC: 4.731e+01, IC: 1.425e+06, PDE: 2.362e-02, Test: 1.747e+06, Time: 24.72\n",
      "It: 3600, Loss: 6.143e+01, BC: 4.722e+01, IC: 1.370e+06, PDE: 2.414e-02, Test: 1.686e+06, Time: 24.70\n",
      "It: 3700, Loss: 5.962e+01, BC: 4.711e+01, IC: 1.317e+06, PDE: 2.462e-02, Test: 1.626e+06, Time: 24.70\n",
      "It: 3800, Loss: 5.784e+01, BC: 4.701e+01, IC: 1.265e+06, PDE: 2.505e-02, Test: 1.566e+06, Time: 24.74\n",
      "It: 3900, Loss: 5.609e+01, BC: 4.689e+01, IC: 1.214e+06, PDE: 2.543e-02, Test: 1.507e+06, Time: 24.93\n",
      "It: 4000, Loss: 5.438e+01, BC: 4.678e+01, IC: 1.163e+06, PDE: 2.576e-02, Test: 1.449e+06, Time: 24.96\n",
      "It: 4100, Loss: 5.270e+01, BC: 4.666e+01, IC: 1.114e+06, PDE: 2.605e-02, Test: 1.391e+06, Time: 24.54\n"
     ]
    }
   ],
   "source": [
    "device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n",
    "# domain bounds, ordered [x, y, z, t]\n",
    "x_max = np.array([40., 10., 6., 3.])\n",
    "x_min = np.array([0. ,  0., 0., 0.])\n",
    "# device-resident copies used by input_transform\n",
    "X_max = torch.tensor(x_max,dtype=torch.float).to(device)\n",
    "X_min = torch.tensor(x_min,dtype=torch.float).to(device)\n",
    "    \n",
    "# laser params\n",
    "x0 = 5. # beam start position in x\n",
    "y0 = 5. # beam y position (fixed along the straight toolpath)\n",
    "r = 1.5 # beam radius, used in the Gaussian flux q\n",
    "v = 10. # scan speed along +x\n",
    "t_end = 3. # time at which the laser switches off\n",
    "P = 500. # laser power\n",
    "eta = .4 # absorption efficiency\n",
    "\n",
    "# T_ambient, and max T range (both consumed by output_transform)\n",
    "T_ref = 298.\n",
    "T_range = 3000.\n",
    "\n",
    "# material params (used in the PDE and BC residuals)\n",
    "Cp = .5 # specific heat\n",
    "k = .01 # thermal conductivity\n",
    "h = 2e-5 # convection coefficient\n",
    "Rboltz = 5.6704e-14 # Stefan-Boltzmann constant in this unit system\n",
    "emiss = .3 # emissivity\n",
    "rho = 8e-3 # density\n",
    "    \n",
    "# valid data: columns 0-3 are (x,y,z,t) inputs, column 4 the target value\n",
    "data = np.load('./data/1_forward/data.npy')\n",
    "test_in = torch.tensor(data[:,0:4],requires_grad=False,dtype=torch.float).to(device)\n",
    "test_out = torch.tensor(data[:,4:5],requires_grad=False,dtype=torch.float).to(device)\n",
    "    \n",
    "    \n",
    "iterations = 50000\n",
    "lr = 2e-5\n",
    "\n",
    "# 4 inputs (x,y,z,t) -> 1 output (T); tanh hidden layers with the\n",
    "# normalization / positivity transforms defined above\n",
    "net = FNN([4,64,64,64,1],nn.Tanh(),in_tf=input_transform,out_tf=output_transform)\n",
    "net.to(device)\n",
    "\n",
    "point_sets,flags = generate_points([],[])\n",
    "\n",
    "# w: loss weights, presumably ordered to match the BC/IC/PDE/test terms\n",
    "# logged by train -- TODO confirm against src.train\n",
    "l_history,err_history = train(net,PDE,BC,point_sets,flags,iterations,lr=lr,info_num=100,\n",
    "                              test_in = test_in,test_out = test_out,w=[1.,1e-4,1.,1e-4],inv_params = [])\n",
    "\n",
    "# persist weights and training curves (note: 'auxilary' spelling is part of\n",
    "# the existing result paths, kept as-is)\n",
    "torch.save(net.state_dict(),'./results/1_forward/no_auxilary_data.pt')\n",
    "np.save('./results/1_forward/no_auxilary_data_loss.npy',l_history)\n",
    "np.save('./results/1_forward/no_auxilary_data_error.npy',err_history)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
