I hereby claim:

- I am dantuluri on github.
- I am dantuluri (https://keybase.io/dantuluri) on keybase.
- I have a public key ASD0w4Un6XlOumqA6HXmIUrcbhMZD_wNS4D8iDE2X4bfLwo

To claim this, I am signing this object:
```cpp
// C++ includes used for precompiling -*- C++ -*-

// Copyright (C) 2003-2013 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.
```
```java
int[][] result;
float t, c;

// Smoothstep easing: maps p in [0, 1] onto a smooth S-curve.
float ease(float p) {
  return 3*p*p - 2*p*p*p;
}

// Symmetric power easing with sharpness g (g = 1 is linear).
float ease(float p, float g) {
  if (p < 0.5)
    return 0.5 * pow(2*p, g);
  else
    return 1 - 0.5 * pow(2*(1 - p), g);
}
```
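For comparison, here is the same pair of easing curves in Python, a translation of the Processing code above added purely for illustration (the name `ease_pow` is mine; it is not part of the original sketch):

```python
# Illustrative Python translation of the easing curves above.

def ease(p):
    # Smoothstep: 3p^2 - 2p^3, an S-curve through (0, 0) and (1, 1).
    return 3 * p**2 - 2 * p**3

def ease_pow(p, g):
    # Symmetric power easing; larger g gives a sharper ramp around p = 0.5.
    if p < 0.5:
        return 0.5 * (2 * p) ** g
    return 1 - 0.5 * (2 * (1 - p)) ** g

if __name__ == "__main__":
    for i in range(6):
        p = i / 5
        print(f"p={p:.1f}  smoothstep={ease(p):.3f}  power(g=3)={ease_pow(p, 3):.3f}")
```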
```js
// modified distill.pub template v1 for world models.
(function (global, factory) {
  typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
  typeof define === 'function' && define.amd ? define(['exports'], factory) :
  (factory((global.dl = global.dl || {})));
}(this, (function (exports) { 'use strict';

  // Default the document language to English if none is declared.
  var html = function(dom) {
    if (!dom.querySelector("html").getAttribute("lang")) {
      dom.querySelector("html").setAttribute("lang", "en");
    }
  };
})));
```
```python
import numpy as np
import math
import tensorflow as tf
from mpi4py import MPI
from spinup.utils.mpi_tools import broadcast

def flat_concat(xs):
    # Flatten each tensor to 1-D and join them into a single vector.
    return tf.concat([tf.reshape(x, (-1,)) for x in xs], axis=0)
```
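`flat_concat` packs a list of tensors into one flat vector, which is what lets Spinning Up ship all parameters across MPI ranks in a single broadcast. A minimal sketch of that pattern follows; the variable shapes and session setup are illustrative placeholders, not taken from the original file:

```python
# Sketch: sync parameters across MPI ranks via one flat buffer.
# Shapes below are placeholders; TF1-style session API as used by spinup.
if __name__ == "__main__":
    xs = [tf.Variable(np.random.randn(3, 2).astype(np.float32)),
          tf.Variable(np.random.randn(4).astype(np.float32))]
    flat = flat_concat(xs)  # shape (10,): 3*2 + 4 elements, end to end

    sess = tf.Session()
    sess.run(tf.global_variables_initializer())
    buf = sess.run(flat)
    broadcast(buf)  # wraps MPI.COMM_WORLD.Bcast; all ranks receive rank 0's values
```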
```python
import numpy as np
import math
import tensorflow as tf
import gym
import time
import spinup.algos.vpg.core as core
from spinup.utils.logx import EpochLogger
from spinup.utils.mpi_tf import MpiAdamOptimizer, sync_all_params
from spinup.utils.mpi_tools import mpi_fork, mpi_avg, proc_id, mpi_statistics_scalar, num_procs
```
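These utilities handle the multi-process setup: `mpi_fork` re-launches the script under `mpirun`, and `proc_id`/`num_procs` identify each worker. A short sketch of the typical bootstrapping, with placeholder values for the process count and seed:

```python
# Sketch: typical MPI bootstrapping at the top of a spinup training script.
if __name__ == "__main__":
    mpi_fork(4)                    # relaunch this script under mpirun with 4 workers
    seed = 0 + 10000 * proc_id()   # per-rank seed offset, mirroring spinup's algorithms
    tf.set_random_seed(seed)
    np.random.seed(seed)
    print("process %d of %d ready" % (proc_id(), num_procs()))
```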