Spring Boot and RESTful API (3) Cassandra

RoyKings 2017-07-06


Installation of the Latest 3.10 Version

> wget http://mirror.stjschools.org/public/apache/cassandra/3.10/apache-cassandra-3.10-bin.tar.gz

Unzip the file, put it in the working directory, and add bin to the PATH.

Prepare the directory

> sudo mkdir /var/lib/cassandra

> sudo chown -R carl /var/lib/cassandra

Command to start Cassandra

> cassandra -Dcassandra.config=file:///opt/cassandra/conf/cassandra.yaml

Connect to the local server with the client

> cqlsh localhost 9042

Run the Easy Sample

Prepare the Database

cqlsh> CREATE KEYSPACE mykeyspace WITH REPLICATION = { 'class': 'SimpleStrategy', 'replication_factor': 1 };

cqlsh> use mykeyspace;

cqlsh> CREATE TABLE customer (id TimeUUID PRIMARY KEY, firstname text, lastname text);

cqlsh> CREATE INDEX customerfistnameindex ON customer (firstname);

cqlsh> CREATE INDEX customersecondnameindex ON customer (lastname);

cqlsh> CREATE TABLE jobcountdiff (
    source_id text,
    record_date TIMESTAMP,
    new_active_count int,
    new_admin_count int,
    old_active_count int,
    old_admin_count int,
    PRIMARY KEY (source_id, record_date)
) WITH CLUSTERING ORDER BY (record_date DESC);

cqlsh> INSERT INTO jobcountdiff (source_id, record_date, new_active_count, new_admin_count, old_active_count, old_admin_count) VALUES ('9527', toTimestamp(now()), 1, 1, 1, 1);

cqlsh> INSERT INTO jobcountdiff (source_id, record_date, new_active_count, new_admin_count, old_active_count, old_admin_count) VALUES ('9527', toTimestamp(now()), 2, 2, 2, 2);

cqlsh> INSERT INTO jobcountdiff (source_id, record_date, new_active_count, new_admin_count, old_active_count, old_admin_count) VALUES ('9527', toTimestamp(now()), 3, 3, 3, 3);

cqlsh> INSERT INTO jobcountdiff (source_id, record_date, new_active_count, new_admin_count, old_active_count, old_admin_count) VALUES ('9528', toTimestamp(now()), 3, 3, 3, 3);

cqlsh> select * from jobcountdiff where source_id = '9527' order by record_date desc;

cqlsh> DESCRIBE TABLES;

https://academy.datastax.com/resources/getting-started-time-series-data-modeling

cqlsh> select * from jobcountdiff where source_id = '9527' and record_date > '2017-06-01' and record_date < '2017-06-18';

cqlsh> select * from jobcountdiff where source_id = '9527';

cqlsh> CREATE TABLE temperature_by_day3 (
    date text,
    weatherstation_id text,
    event_time timestamp,
    temperature text,
    PRIMARY KEY ((date), weatherstation_id, event_time)
);

cqlsh> INSERT INTO temperature_by_day3 (weatherstation_id, date, event_time, temperature)
       VALUES ('1234ABCD', '2013-04-03', '2013-04-03 07:01:00', '72F');

cqlsh> INSERT INTO temperature_by_day3 (weatherstation_id, date, event_time, temperature)
       VALUES ('1234ABCD', '2013-04-03', '2013-04-03 08:01:00', '72F');

cqlsh> INSERT INTO temperature_by_day3 (weatherstation_id, date, event_time, temperature)
       VALUES ('1234ABCE', '2013-04-03', '2013-04-03 07:01:00', '72F');

cqlsh> CREATE TABLE temperature_by_day4 (
    date text,
    weatherstation_id text,
    event_time timestamp,
    temperature text,
    PRIMARY KEY ((date), weatherstation_id, event_time)
) WITH CLUSTERING ORDER BY (weatherstation_id DESC, event_time DESC);

cqlsh> INSERT INTO temperature_by_day4 (weatherstation_id, date, event_time, temperature)
       VALUES ('1234ABCE', '2013-04-03', '2013-04-03 07:01:00', '72F');

cqlsh> INSERT INTO temperature_by_day4 (weatherstation_id, date, event_time, temperature)
       VALUES ('1234ABCE', '2013-04-03', '2013-04-03 08:01:00', '72F');

cqlsh> INSERT INTO temperature_by_day4 (weatherstation_id, date, event_time, temperature)
       VALUES ('1234ABCD', '2013-04-03', '2013-04-03 08:01:00', '72F');

Queries that may be needed:

select * from jobcountdiff where source_id = '9527';

select * from jobcountdiff where source_id = '9527' and record_date > '2017-06-15';

select distinct source_id from jobcountdiff;
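
These queries can also be run from Java. Below is a minimal sketch assuming the DataStax Java driver 3.x is on the classpath; the contact point and keyspace name are assumptions matching the setup above.

import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.Session;

public class JobCountDiffQuery {
    public static void main(String[] args) {
        // Connect to the local node started earlier and use the sample keyspace
        try (Cluster cluster = Cluster.builder().addContactPoint("localhost").build();
             Session session = cluster.connect("mykeyspace")) {
            // Single-partition query; rows come back newest first because of
            // CLUSTERING ORDER BY (record_date DESC)
            ResultSet rs = session.execute(
                    "SELECT * FROM jobcountdiff WHERE source_id = ?", "9527");
            for (Row row : rs) {
                System.out.println(row.getTimestamp("record_date")
                        + " new_active_count=" + row.getInt("new_active_count"));
            }
        }
    }
}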

Set Up Logging Project and Database

> CREATE KEYSPACE jobsmonitor WITH REPLICATION = { 'class': 'SimpleStrategy', 'replication_factor': 1 };

> use jobsmonitor;

> CREATE TABLE jobcounthistory (
    source_id text,
    record_date TIMESTAMP,
    new_active_count int,
    new_admin_count int,
    old_active_count int,
    old_admin_count int,
    PRIMARY KEY (source_id, record_date)
) WITH CLUSTERING ORDER BY (record_date DESC);

Possible queries are as follows:

select * from jobcounthistory where source_id = 'asdfasf';

select * from jobcounthistory where source_id = 'asdf' and record_date > '2017-06-11';

> CREATE TABLE jobcountdiff (
    date text,
    diff int,
    record_date TIMESTAMP,
    source_id text,
    PRIMARY KEY (date, diff, record_date)
) WITH CLUSTERING ORDER BY (diff ASC, record_date DESC);

Possible queries are as follows:

select * from jobcountdiff where date = '2017-06-15';

select * from jobcountdiff where date = '2017-06-15' and diff > 10;

http://docs.spring.io/spring-data/cassandra/docs/2.0.0.M4/reference/html/
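
For the Spring Boot side, a minimal Spring Data Cassandra sketch for the customer table created earlier might look like the following. It is based on the 2.0 API the link above documents; the package locations and the derived finder method are assumptions to verify against it.

import java.util.List;
import java.util.UUID;

import org.springframework.data.cassandra.core.mapping.PrimaryKey;
import org.springframework.data.cassandra.core.mapping.Table;
import org.springframework.data.cassandra.repository.CassandraRepository;

@Table("customer")
class Customer {
    // Maps to the TimeUUID primary key column
    @PrimaryKey
    private UUID id;

    private String firstname;
    private String lastname;

    // getters and setters omitted for brevity
}

// Spring Data derives the CQL from the method name; querying on firstname
// works because of the secondary index created earlier
interface CustomerRepository extends CassandraRepository<Customer, UUID> {
    List<Customer> findByFirstname(String firstname);
}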

Cassandra Structure Reference from an Old Project

CampaignProfile -> campaignID, profileID (MySQL)

Profile -> profileID, brandCode, name, enabled, description, rules (MySQL)

Rule -> profileID, attributeMetadataID, operator, value (MySQL)

AttributeMetadata -> id, brandCode, name, attributeType, allowedValues, enabled, dateCreated (MySQL)

Attributes -> brandCode, deviceID, Attributes (Map)

put -> brandCode, deviceID, unixtime, attributes

val mutator = HFactory.createMutator(ksp, CompositeSerializer.get)
val column = HFactory.createColumn(unixtime, attrib.toBytes)
column.setClock(ksp.createClock)
mutator.insert(rowkey(brandCode, deviceId), columnFamilyName, column)
if (config.getBoolean(env + ".attributes.indexOnPut")) {
  DeviceLookup.update(brandCode, deviceId, attrib, Profile.enabled(brandCode))
}

get -> brandCode, deviceID, unixtime

val q = HFactory.createSliceQuery(ksp, CompositeSerializer.get, LongSerializer.get, BytesArraySerializer.get)
  .setKey(rowkey(brandCode, deviceId))
  .setColumnFamily(columnFamilyName)
  .setRange(unixtime, 0L, false, 1)
q.execute.get.getColumns.headOption.map(x => com.digby.localpoint.model.Attributes(x.getValue))

get -> brandCode, deviceID

val q = HFactory.createSliceQuery(ksp, CompositeSerializer.get, LongSerializer.get, BytesArraySerializer.get)
  .setKey(rowkey(brandCode, deviceId))
  .setColumnFamily(columnFamilyName)
  .setRange(null, null, false, 1)
q.execute.get.getColumns.headOption.map(x => com.digby.localpoint.model.Attributes(x.getValue))

DeviceLookup -> brandCode, profileID

addDevice(brandCode, profileId, deviceID)

val mutator: Mutator[Composite] = HFactory.createMutator(ksp, CompositeSerializer.get)
val column = HFactory.createColumn(deviceId, "")
column.setClock(ksp.createClock)
mutator.addInsertion(rowkey(brand, profileId), columnFamilyName, column)
mutator.execute()

addDevices(brandCode, profileID, deviceIDs: List[String])

var cnt = 0
val mutator: Mutator[Composite] = HFactory.createMutator(ksp, CompositeSerializer.get)
for (deviceId <- deviceIds) {
  cnt += 1
  val column = HFactory.createColumn(deviceId, "")
  column.setClock(ksp.createClock)
  mutator.addInsertion(rowkey(brand, profileId), columnFamilyName, column)
  if ((cnt % batchSize) == 0 || deviceIds.length <= batchSize)
    mutator.execute()
}

def removeDevice(brand: String, profileId: Profile.Id, deviceId: String): Unit = {
  // Remove column
  val mutator: Mutator[Composite] = HFactory.createMutator(ksp, CompositeSerializer.get)
  mutator.addDeletion(rowkey(brand, profileId), columnFamilyName, deviceId, StringSerializer.get)
  mutator.execute()
}

def removeDevices(brand: String, profileId: Profile.Id, deviceIds: List[String]): Unit = {
  // Remove devices
  var cnt = 0
  val mutator: Mutator[Composite] = HFactory.createMutator(ksp, CompositeSerializer.get)
  for (deviceId <- deviceIds) {
    cnt += 1
    mutator.addDeletion(rowkey(brand, profileId), columnFamilyName, deviceId, StringSerializer.get)
    if ((cnt % batchSize) == 0 || deviceIds.length <= batchSize)
      mutator.execute()
  }
}

def update(brandCode: String, deviceId: String, attributes: Attributes, profiles: Set[Profile]): Unit = {
  val mutator: Mutator[Composite] = HFactory.createMutator(ksp, CompositeSerializer.get)
  val column = HFactory.createColumn(deviceId, Array[Byte]())
  val clock = ksp.createClock
  column.setClock(clock)
  profiles.foreach { p =>
    if (p(attributes))
      mutator.addInsertion(rowkey(brandCode, p.id.get), columnFamilyName, column)
    else
      mutator.addDeletion(rowkey(brandCode, p.id.get), columnFamilyName, deviceId, StringSerializer.get, clock)
  }
  mutator.execute()
}

Take these as examples, then:

CREATE TABLE attributes (
    brandCode text,
    deviceID text,
    PRIMARY KEY (brandCode, deviceID)
);

> alter table attributes add t1499356374 varchar;

> insert into attributes (brandcode, deviceid, t1499356374) values ('1', '1', 'good device');

We can have dynamic columns for one key set.
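
From application code, the same dynamic-column pattern can be driven with the DataStax Java driver. A rough sketch follows; the contact point and keyspace are assumptions, and altering the schema on every write is only for illustration.

import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Session;

public class DynamicColumnDemo {
    public static void main(String[] args) {
        try (Cluster cluster = Cluster.builder().addContactPoint("localhost").build();
             Session session = cluster.connect("mykeyspace")) {
            // Name the new column after the current unix time, mirroring t1499356374 above
            String column = "t" + (System.currentTimeMillis() / 1000);
            session.execute("ALTER TABLE attributes ADD " + column + " varchar");
            session.execute(
                    "INSERT INTO attributes (brandcode, deviceid, " + column + ") VALUES (?, ?, ?)",
                    "1", "1", "good device");
        }
    }
}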

Redshift

Yesterday, I connected to our Redshift database to query some data as well.

We used this driver:

https://s3.amazonaws.com/redshift-downloads/drivers/RedshiftJDBC41-1.2.1.1001.jar

And we downloaded this open-source tool:

http://www.sql-workbench.net/downloads.html

Create a new profile from Manage Drivers.
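
The same Redshift JDBC driver also works programmatically from Java. Here is a minimal JDBC sketch; the cluster endpoint, database name, and credentials are placeholders.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class RedshiftQueryDemo {
    public static void main(String[] args) throws Exception {
        // Register the Redshift JDBC 4.1 driver downloaded above
        Class.forName("com.amazon.redshift.jdbc41.Driver");

        // Placeholder endpoint, database, and credentials
        String url = "jdbc:redshift://your-cluster.example.us-east-1.redshift.amazonaws.com:5439/dev";
        try (Connection conn = DriverManager.getConnection(url, "user", "password");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("select current_date")) {
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        }
    }
}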

References:

https://github.com/spring-projects/spring-boot/tree/v2.0.0.M1/spring-boot-samples/spring-boot-sample-data-cassandra
