Major refactoring. Immutable DataPoints and Tables

Alexander Nozik 2016-04-26 23:40:05 +03:00
parent 78e1554804
commit ac8340006d
21 changed files with 134 additions and 99 deletions
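For orientation, a minimal sketch of how the renamed API fits together after this refactoring. It uses only class names that appear in the hunks below (TableFormatBuilder, MapPoint, ListTable, DataPoint, Table); the constructor signatures, the String[] column-name array, and the example values are assumptions for illustration, not code taken from this commit.

import hep.dataforge.tables.DataPoint;
import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.MapPoint;
import hep.dataforge.tables.Table;
import hep.dataforge.tables.TableFormatBuilder;

import java.util.ArrayList;
import java.util.List;

// Hypothetical usage sketch of the immutable table API introduced by this commit.
public class ImmutableTableSketch {

    public static Table buildExample() {
        // Declare the column format, as buildSet() does below.
        TableFormatBuilder builder = new TableFormatBuilder()
                .addNumber("channel")
                .addNumber("count");

        // MapPoint(names, values...) mirrors JNASpectrum.asDataSet() below;
        // each point is an immutable row.
        String[] names = {"channel", "count"};
        List<DataPoint> points = new ArrayList<>();
        points.add(new MapPoint(names, 1, 100));
        points.add(new MapPoint(names, 2, 250));

        // ListTable replaces ListPointSet; the resulting Table is immutable.
        return new ListTable(points, builder.build());
    }
}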

View File

@ -21,7 +21,7 @@ import hep.dataforge.control.collectors.RegularPointCollector;
import hep.dataforge.control.measurements.DataDevice;
import hep.dataforge.control.ports.PortHandler;
import hep.dataforge.control.ports.TcpPortHandler;
import hep.dataforge.points.FormatBuilder;
import hep.dataforge.tables.TableFormatBuilder;
import hep.dataforge.exceptions.ControlException;
import hep.dataforge.exceptions.PortException;
import hep.dataforge.exceptions.StorageException;
@ -85,16 +85,16 @@ public class PKT8Device extends DataDevice<PKT8Device.PKT8Measurement> implement
String suffix = Integer.toString((int) Instant.now().toEpochMilli());
// Building data format
FormatBuilder formatBuilder = new FormatBuilder()
TableFormatBuilder tableFormatBuilder = new TableFormatBuilder()
.addTime("timestamp");
List<String> names = new ArrayList<>();
for (PKT8Channel channel : this.channels.values()) {
formatBuilder.addNumber(channel.getName());
tableFormatBuilder.addNumber(channel.getName());
names.add(channel.getName());
}
this.pointLoader = LoaderFactory.buildPointLoder(storage, "cryotemp_" + suffix, "", "timestamp", formatBuilder.build());
this.pointLoader = LoaderFactory.buildPointLoder(storage, "cryotemp_" + suffix, "", "timestamp", tableFormatBuilder.build());
Duration duration = Duration.parse(meta().getString("averagingDuration", "PT30S"));

View File

@ -7,10 +7,10 @@
package inr.numass.scripts
import hep.dataforge.io.ColumnedDataWriter
import hep.dataforge.points.FormatBuilder
import hep.dataforge.points.ListPointSet
import hep.dataforge.points.MapPoint
import hep.dataforge.points.PointSet
import hep.dataforge.tables.TableFormatBuilder
import hep.dataforge.tables.ListTable
import hep.dataforge.tables.MapPoint
import hep.dataforge.tables.Table
import inr.numass.data.NumassData
import inr.numass.data.*
import javafx.stage.FileChooser
@ -31,7 +31,7 @@ Map<Double, Double> dif(NumassData data1, NumassData data2, double uset){
}
def buildSet(NumassData data1, NumassData data2, double... points){
FormatBuilder builder = new FormatBuilder().addNumber("channel");
TableFormatBuilder builder = new TableFormatBuilder().addNumber("channel");
List<MapPoint> pointList = new ArrayList<>();
for(double point: points){
@ -50,7 +50,7 @@ def buildSet(NumassData data1, NumassData data2, double... points){
}
}
ListPointSet set = new ListPointSet(pointList,builder.build());
ListTable set = new ListTable(pointList,builder.build());
}

View File

@ -16,7 +16,7 @@
package inr.numass.scripts;
import hep.dataforge.context.GlobalContext;
import hep.dataforge.points.ListPointSet;
import hep.dataforge.tables.ListTable;
import hep.dataforge.datafitter.FitManager;
import hep.dataforge.datafitter.FitState;
import hep.dataforge.datafitter.MINUITPlugin
@ -85,7 +85,7 @@ allPars.setParValue("trap", 1d);
allPars.setParError("trap", 0.2d);
allPars.setParDomain("trap", 0d, Double.POSITIVE_INFINITY);
ListPointSet data = readData("c:\\Users\\Darksnake\\Dropbox\\PlayGround\\RUN23.DAT", 18400d);
ListTable data = readData("c:\\Users\\Darksnake\\Dropbox\\PlayGround\\RUN23.DAT", 18400d);
FitState state = new FitState(data, model, allPars);

View File

@ -17,7 +17,7 @@ package inr.numass.scripts;
import hep.dataforge.context.GlobalContext;
import static hep.dataforge.context.GlobalContext.out;
import hep.dataforge.points.ListPointSet;
import hep.dataforge.tables.ListTable;
import hep.dataforge.datafitter.FitManager;
import hep.dataforge.datafitter.FitState;
import hep.dataforge.datafitter.FitTask;
@ -77,10 +77,10 @@ allPars.setParDomain("trap", 0d, Double.POSITIVE_INFINITY);
// PrintNamed.printSpectrum(GlobalContext.out(), spectrum, allPars, 0.0, 18700.0, 600);
//String fileName = "d:\\PlayGround\\merge\\scans.out";
// String configName = "d:\\PlayGround\\SCAN.CFG";
// ListPointSet config = OldDataReader.readConfig(configName);
// ListTable config = OldDataReader.readConfig(configName);
SpectrumGenerator generator = new SpectrumGenerator(model, allPars, 12316);
ListPointSet data = generator.generateData(DataModelUtils.getUniformSpectrumConfiguration(13500d, 18200, 1e6, 60));
ListTable data = generator.generateData(DataModelUtils.getUniformSpectrumConfiguration(13500d, 18200, 1e6, 60));
// data = data.filter("X", Value.of(15510.0), Value.of(18610.0));
// allPars.setParValue("X", 0.4);

View File

@ -17,7 +17,7 @@ package inr.numass.scripts;
import hep.dataforge.context.GlobalContext;
import static hep.dataforge.context.GlobalContext.out;
import hep.dataforge.points.ListPointSet;
import hep.dataforge.tables.ListTable;
import hep.dataforge.datafitter.FitManager;
import hep.dataforge.datafitter.FitState;
import hep.dataforge.datafitter.FitTask;
@ -83,10 +83,10 @@ allPars.setParDomain("trap", 0d, Double.POSITIVE_INFINITY);
// PrintNamed.printSpectrum(GlobalContext.out(), spectrum, allPars, 0.0, 18700.0, 600);
//String fileName = "d:\\PlayGround\\merge\\scans.out";
// String configName = "d:\\PlayGround\\SCAN.CFG";
// ListPointSet config = OldDataReader.readConfig(configName);
// ListTable config = OldDataReader.readConfig(configName);
SpectrumGenerator generator = new SpectrumGenerator(model, allPars, 12316);
ListPointSet data = generator.generateData(DataModelUtils.getUniformSpectrumConfiguration(14000d, 18500, 2000, 90));
ListTable data = generator.generateData(DataModelUtils.getUniformSpectrumConfiguration(14000d, 18500, 2000, 90));
data = TritiumUtils.correctForDeadTime(data, new SpectrumDataAdapter(), 1e-8);
// data = data.filter("X", Value.of(15510.0), Value.of(18610.0));

View File

@ -56,10 +56,10 @@ PrintNamed.printSpectrum(new PrintWriter(System.out), spectrum, allPars, 18495,
// //String fileName = "d:\\PlayGround\\merge\\scans.out";
//// String configName = "d:\\PlayGround\\SCAN.CFG";
//// ListPointSet config = OldDataReader.readConfig(configName);
//// ListTable config = OldDataReader.readConfig(configName);
// SpectrumGenerator generator = new SpectrumGenerator(model, allPars, 12316);
//
// ListPointSet data = generator.generateData(DataModelUtils.getUniformSpectrumConfiguration(18495, 18505, 20, 20));
// ListTable data = generator.generateData(DataModelUtils.getUniformSpectrumConfiguration(18495, 18505, 20, 20));
//
//// data = data.filter("X", Value.of(15510.0), Value.of(18610.0));
//// allPars.setParValue("X", 0.4);

View File

@ -17,7 +17,7 @@ package inr.numass.scripts;
import hep.dataforge.context.GlobalContext;
import static hep.dataforge.context.GlobalContext.out;
import hep.dataforge.points.ListPointSet;
import hep.dataforge.tables.ListTable;
import hep.dataforge.datafitter.FitManager;
import hep.dataforge.datafitter.FitState;
import hep.dataforge.datafitter.FitTask;
@ -89,7 +89,7 @@ SpectrumGenerator generator = new SpectrumGenerator(model, allPars, seed);
def config = DataModelUtils.getUniformSpectrumConfiguration(18530d, 18580, 1e7, 60)
//def config = DataModelUtils.getSpectrumConfigurationFromResource("/data/run23.cfg")
ListPointSet data = generator.generateExactData(config);
ListTable data = generator.generateExactData(config);
FitState state = new FitState(data, model, allPars);

View File

@ -17,7 +17,7 @@ package inr.numass.scripts;
import hep.dataforge.context.GlobalContext;
import static hep.dataforge.context.GlobalContext.out;
import hep.dataforge.points.ListPointSet;
import hep.dataforge.tables.ListTable;
import hep.dataforge.datafitter.FitManager;
import hep.dataforge.datafitter.FitState;
import hep.dataforge.datafitter.FitTask;
@ -84,7 +84,7 @@ allPars.setPar("trap", 0, 0.01, 0d, Double.POSITIVE_INFINITY);
SpectrumGenerator generator = new SpectrumGenerator(model, allPars, 12316);
ListPointSet data = generator.generateData(DataModelUtils.getUniformSpectrumConfiguration(14000d, 18200, 1e6, 60));
ListTable data = generator.generateData(DataModelUtils.getUniformSpectrumConfiguration(14000d, 18200, 1e6, 60));
// data = data.filter("X", Value.of(15510.0), Value.of(18610.0));
allPars.setParValue("U2", 0);

View File

@ -17,7 +17,7 @@ package inr.numass.scripts;
import hep.dataforge.context.GlobalContext;
import hep.dataforge.data.DataSet;
import hep.dataforge.points.ListPointSet;
import hep.dataforge.tables.ListTable;
import hep.dataforge.datafitter.FitManager;
import hep.dataforge.datafitter.FitState;
import hep.dataforge.datafitter.ParamSet;
@ -78,7 +78,7 @@ allPars.setParDomain("trap", 0d, Double.POSITIVE_INFINITY);
SpectrumGenerator generator = new SpectrumGenerator(model, allPars);
// ColumnedDataFile file = new ColumnedDataFile("d:\\PlayGround\\RUN36.cfg");
// ListPointSet config = file.getDataSet("time","X");
// ListTable config = file.getDataSet("time","X");
double Elow = 14000d;
double Eup = 18600d;
int numpoints = (int) ((Eup - Elow) / 50);
@ -86,7 +86,7 @@ double time = 1e6 / numpoints; // 3600 / numpoints;
DataSet config = getUniformSpectrumConfiguration(Elow, Eup, time, numpoints);
// config.addAll(DataModelUtils.getUniformSpectrumConfiguration(Eup, Elow, time, numpoints));// in the reverse direction
ListPointSet data = generator.generateData(config);
ListTable data = generator.generateData(config);
// plotTitle = "Generated tritium spectrum data";
// pm.plotXYScatter(data, "X", "Y",plotTitle, null);
// bareBeta.setFSS("D:\\PlayGround\\FSS.dat");

View File

@ -15,8 +15,8 @@
*/
package inr.numass.prop;
import hep.dataforge.points.DataPoint;
import hep.dataforge.points.XYAdapter;
import hep.dataforge.tables.DataPoint;
import hep.dataforge.tables.XYAdapter;
import hep.dataforge.exceptions.NameNotFoundException;
import hep.dataforge.values.Value;

View File

@ -25,8 +25,8 @@ import hep.dataforge.datafitter.models.HistogramModel;
import hep.dataforge.functions.ParametricFunction;
import hep.dataforge.maths.MatrixOperations;
import hep.dataforge.maths.RandomUtils;
import hep.dataforge.points.DataPoint;
import hep.dataforge.points.PointSet;
import hep.dataforge.tables.DataPoint;
import hep.dataforge.tables.Table;
import inr.numass.models.BetaSpectrum;
import inr.numass.models.NBkgSpectrum;
import java.io.FileNotFoundException;
@ -72,7 +72,7 @@ public class PropTest {
//pm.plotFunction(trans.getProduct(bareBeta, allPars, 9000d), 1000d, 19000d, 400);
// pm.plotFunction(FunctionUtils.fix1stArgument(trans.getBivariateFunction(allPars), 14000d), 1000, 18000, 400);
HistogramGenerator generator = new HistogramGenerator(null, model, allPars);
PointSet data = generator.generateUniformHistogram(1000d, 18500d, 350);
Table data = generator.generateUniformHistogram(1000d, 18500d, 350);
long count = 0;
for (DataPoint dp : data) {

View File

@ -17,9 +17,9 @@ package inr.numass.prop.ar;
import hep.dataforge.actions.OneToOneAction;
import hep.dataforge.context.Context;
import hep.dataforge.points.DataPoint;
import hep.dataforge.points.ListPointSet;
import hep.dataforge.points.MapPoint;
import hep.dataforge.tables.DataPoint;
import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.MapPoint;
import hep.dataforge.datafitter.FitManager;
import hep.dataforge.datafitter.FitPlugin;
import hep.dataforge.datafitter.FitState;
@ -37,18 +37,18 @@ import inr.numass.prop.SplitNormalSpectrum;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import hep.dataforge.points.PointSet;
import hep.dataforge.tables.Table;
/**
*
* @author Darksnake
*/
@TypedActionDef(name = "fitJNA", inputType = JNAEpisode.class, outputType = PointSet.class, description = "Fit JNA data by apropriate model")
@TypedActionDef(name = "fitJNA", inputType = JNAEpisode.class, outputType = Table.class, description = "Fit JNA data by apropriate model")
@ValueDef(name = "saveResult", type = "BOOLEAN", def = "true", info = "Save the results of action to a file")
@ValueDef(name = "suffix", def = "", info = "Suffix for saved file")
@ValueDef(name = "loFitChanel", type = "NUMBER", def = "600", info = "Lo chanel to filter data for fit")
@ValueDef(name = "upFitChanel", type = "NUMBER", def = "1100", info = "Up chanel to filter data for fit")
public class FitJNAData extends OneToOneAction<JNAEpisode, PointSet> {
public class FitJNAData extends OneToOneAction<JNAEpisode, Table> {
private final FitManager fm;
@ -63,7 +63,7 @@ public class FitJNAData extends OneToOneAction<JNAEpisode, PointSet> {
}
@Override
protected PointSet execute(Logable log, Meta meta, JNAEpisode input){
protected Table execute(Logable log, Meta meta, JNAEpisode input){
List<DataPoint> res = new ArrayList<>(input.size());
Model model = buildModel();
@ -96,7 +96,7 @@ public class FitJNAData extends OneToOneAction<JNAEpisode, PointSet> {
res.add(point);
}
PointSet data = new ListPointSet(input.getName(), input.meta(), res);
Table data = new ListTable(input.getName(), input.meta(), res);
if (meta.getBoolean("saveResult")) {
String suffix = meta.getString("suffix");
@ -111,7 +111,7 @@ public class FitJNAData extends OneToOneAction<JNAEpisode, PointSet> {
Meta reader = readMeta(spectrum.meta());
double lowerChanel = reader.getDouble("loFitChanel");
double upperChanel = reader.getDouble("upFitChanel");
PointSet data = spectrum.asDataSet().filter("chanel", lowerChanel, upperChanel);
Table data = spectrum.asDataSet().filter("chanel", lowerChanel, upperChanel);
ParamSet params = new ParamSet()
.setPar("amp", 2e5, 1e3)
.setPar("pos", 800d, 1d)

View File

@ -16,9 +16,9 @@
package inr.numass.prop.ar;
import hep.dataforge.names.NamedMetaHolder;
import hep.dataforge.points.DataPoint;
import hep.dataforge.points.ListPointSet;
import hep.dataforge.points.MapPoint;
import hep.dataforge.tables.DataPoint;
import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.MapPoint;
import hep.dataforge.description.NodeDef;
import hep.dataforge.description.ValueDef;
import hep.dataforge.meta.Meta;
@ -28,7 +28,7 @@ import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import hep.dataforge.points.PointSet;
import hep.dataforge.tables.Table;
/**
*
@ -61,12 +61,12 @@ public class JNASpectrum extends NamedMetaHolder {
}
}
public PointSet asDataSet() {
public Table asDataSet() {
List<DataPoint> points = new ArrayList<>();
for (Map.Entry<Double, Long> point : spectrum.entrySet()) {
points.add(new MapPoint(names, point.getKey(), point.getValue()));
}
return new ListPointSet(getName(), meta(), points);
return new ListTable(getName(), meta(), points);
}
public Map<Double, Long> asMap() {

View File

@ -17,8 +17,8 @@ package inr.numass.prop.ar;
import hep.dataforge.actions.OneToOneAction;
import hep.dataforge.context.Context;
import hep.dataforge.points.DataPoint;
import hep.dataforge.points.FileData;
import hep.dataforge.tables.DataPoint;
import hep.dataforge.tables.FileData;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.description.ValueDef;
import hep.dataforge.exceptions.ContentException;
@ -38,7 +38,7 @@ import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Scanner;
import hep.dataforge.points.PointSet;
import hep.dataforge.tables.Table;
/**
*
@ -72,7 +72,7 @@ public class ReadJNADataAction extends OneToOneAction<FileData, JNAEpisode> {
Scanner timeScanner = new Scanner(timeFile);
String tempFileName = reader.getString("temperatureFile", "");
PointSet tempData = null;
Table tempData = null;
if (!tempFileName.isEmpty()) {
String[] format = {"time", "T2", "T4", "T5", "T6"};
File tempFile = IOUtils.getFile(input.getInputFile(), tempFileName);
@ -108,7 +108,7 @@ public class ReadJNADataAction extends OneToOneAction<FileData, JNAEpisode> {
}
private Meta prepareAnnotation(Meta parent, double startTime, double stopTime, PointSet tempData) {
private Meta prepareAnnotation(Meta parent, double startTime, double stopTime, Table tempData) {
MetaBuilder meta = parent.getBuilder();
meta.putValue("relativeStartTime", startTime);
meta.putValue("relativeStopTime", stopTime);

View File

@ -16,7 +16,7 @@
package inr.numass.prop;
import hep.dataforge.context.GlobalContext;
import hep.dataforge.points.FileData;
import hep.dataforge.tables.FileData;
import hep.dataforge.datafitter.MINUITPlugin;
import hep.dataforge.io.ColumnedDataWriter;
import hep.dataforge.meta.MetaBuilder;
@ -25,7 +25,7 @@ import inr.numass.prop.ar.JNAEpisode;
import inr.numass.prop.ar.ReadJNADataAction;
import java.io.File;
import java.io.FileNotFoundException;
import hep.dataforge.points.PointSet;
import hep.dataforge.tables.Table;
/**
*
@ -48,7 +48,7 @@ public class TestFit {
);
JNAEpisode spectra = new ReadJNADataAction(GlobalContext.instance(), null).runOne(file);
PointSet data = new FitJNAData(GlobalContext.instance(), null).runOne(spectra);
Table data = new FitJNAData(GlobalContext.instance(), null).runOne(spectra);
ColumnedDataWriter.writeDataSet(System.out, data, "***RESULT***");
}

View File

@ -16,8 +16,8 @@
package inr.numass.prop;
import hep.dataforge.context.GlobalContext;
import hep.dataforge.points.FileData;
import hep.dataforge.points.XYAdapter;
import hep.dataforge.tables.FileData;
import hep.dataforge.tables.XYAdapter;
import hep.dataforge.meta.Meta;
import hep.dataforge.meta.MetaBuilder;
import hep.dataforge.plots.PlotFrame;
@ -72,7 +72,7 @@ public class TestReader {
// double lowerChanel = 600;
// double upperChanel = 1100;
// PointSet data = sp.asDataSet().filter("chanel", lowerChanel, upperChanel);
// Table data = sp.asDataSet().filter("chanel", lowerChanel, upperChanel);
// ParamSet params = new ParamSet()
// .setPar("amp", 2e5, 1e3)
// .setPar("pos", 800d, 1d)

View File

@ -21,7 +21,7 @@ import java.nio.MappedByteBuffer
import java.nio.channels.FileChannel
import hep.dataforge.storage.commons.LoaderFactory
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.points.MapPoint
import hep.dataforge.tables.MapPoint
new StorageManager().startGlobal();

View File

@ -11,6 +11,10 @@ task runServer(type: JavaExec) {
standardInput = System.in
}
processResources {
from project(':dataforge-storage:storage-servlet').file('/src/main/resources/ratpack')
}
//sourceSets.main.resources{
// srcDir project(':dataforge-storage:storage-servlet').file('ratpack')
//}

View File

@ -27,11 +27,9 @@ import hep.dataforge.storage.filestorage.FileStorage;
import inr.numass.storage.NumassStorage;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.net.InetAddress;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ratpack.file.FileHandlerSpec;
import ratpack.handling.Chain;
import ratpack.server.BaseDir;
import ratpack.server.RatpackServer;
@ -86,10 +84,11 @@ public class NumassServer extends AbstractNetworkListener {
ratpack = RatpackServer.start((RatpackServerSpec server) -> server
.serverConfig((ServerConfigBuilder config) -> config
// .baseDir(Paths.get(getClass().getClassLoader().getResource("ratpack").toURI()))
// .baseDir(BaseDir.find())
.baseDir(BaseDir.find())
.address(InetAddress.getLocalHost())
.port(port))
.handlers((Chain chain) -> chain
// .files()
.files()
.get(new NumassRootHandler(this))
.get("storage", new NumassStorageHandler(root))
)
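The hunk above switches the embedded Ratpack server to a resolved base directory plus static file serving: BaseDir.find() locates the base dir on the classpath (populated by the processResources block above), and files() serves its contents, which is why the templates below can reference css/bootstrap.min.css and js/bootstrap.min.js locally instead of a CDN. A minimal standalone sketch of that pattern, with an illustrative port and handler that are not taken from this commit:

import ratpack.server.BaseDir;
import ratpack.server.RatpackServer;

// Hypothetical sketch: serve static assets from the classpath base dir.
public class StaticAssetsSketch {
    public static void main(String[] args) throws Exception {
        RatpackServer.start(server -> server
                .serverConfig(config -> config
                        .baseDir(BaseDir.find())   // base dir found on the classpath
                        .port(8336))               // illustrative port, not from this commit
                .handlers(chain -> chain
                        .files()                   // serve css/, js/, etc. from the base dir
                        .get(ctx -> ctx.render("Numass server root"))));
    }
}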

View File

@ -4,11 +4,23 @@
<title>Numass run notes</title>
<meta charset="UTF-8">
<meta http-equiv="refresh" content="30">
<!-- Bootstrap -->
<link href="css/bootstrap.min.css" rel="stylesheet">
</head>
<body>
<div class="container">
<div class="page-header">
<h1> Numass experiment run notes:</h1>
</div>
<#list notes as note>
<p>${note};</p>
</#list>
</div>
<!-- jQuery (necessary for Bootstrap's JavaScript plugins) -->
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.3/jquery.min.js"></script>
<!-- Include all compiled plugins (below), or include individual files as needed -->
<script src="js/bootstrap.min.js"></script>
</body>
</html>

View File

@ -4,9 +4,8 @@
<meta charset="utf-8">
<meta http-equiv="refresh" content="30">
<!-- Latest compiled and minified CSS -->
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css" integrity="sha384-1q8mTJOASx8j1Au+a5WDVnPi2lkFfwwEAa8hDDdjZlpLegxhjVME1fgjWPGmkzs7" crossorigin="anonymous">
<!-- Bootstrap -->
<link href="css/bootstrap.min.css" rel="stylesheet">
<title>Numass storage</title>
</head>
@ -16,36 +15,57 @@
}
</style>
<body>
<div class="container">
<div class="page-header">
<h1> Server configuration </h1>
<div/>
<#if serverMeta??>
<div class="container">
<h3> Server metadata: </h3>
<div class="well well-lg">
${serverMeta}
</div>
</div>
</#if>
<br/>
<#if serverRootState??>
<div class="container">
<h3> Current root state: </h3>
<div class="well well-lg">
${serverRootState}
</div>
</div>
</#if>
<br/>
<#if runPresent>
<h1> Current run configuration </h1>
<h2> Current run configuration </h2>
<#if runMeta??>
<div class="container">
<h3> Run metadata: </h3>
<div class="well well-lg">
${runMeta}
</div>
</div>
</#if>
<#if runState?? >
<div class="container">
<h3> Current run state: </h3>
<div class="well well-lg">
${runState}
</div>
</div>
</#if>
<h2> Current run storage content: </h2>
<h3> Current run storage content: </h3>
<div class="well well-lg">
${storageContent}
</div>
</#if>
</div>
<!-- Optional theme -->
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap-theme.min.css" integrity="sha384-fLW2N01lMqjakBkx3l/M9EahuwpSfeNvV63J5ezn3uZzapT0u7EYsXMjQV+0En5r" crossorigin="anonymous">
<!-- Latest compiled and minified JavaScript -->
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/js/bootstrap.min.js" integrity="sha384-0mSbJDEHialfmuBBQP6A4Qrprq5OVfW37PRR3j5ELqxss1yVqOtnepnHVP9aJ7xS" crossorigin="anonymous"></script>
<!-- jQuery (necessary for Bootstrap's JavaScript plugins) -->
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.3/jquery.min.js"></script>
<!-- Include all compiled plugins (below), or include individual files as needed -->
<script src="js/bootstrap.min.js"></script>
</body>
</html>