The Solr wiki documentation also covers this (the Atomic Updates page). A partial update sends only the document's unique key together with the fields to be changed; the new value of each changed field is a Map whose key names the operation, e.g. "set" to replace the stored value.
 
 
 
 
 
 
 
Java code:
 
public static void update() {
    try {
        // connect to a standalone Solr core over HTTP
        String url = "http://192.168.0.237:8983/solr/weibo";
        SolrServer server = new HttpSolrServer(url);

        // or connect to SolrCloud through ZooKeeper instead:
        // String zkHost = "192.168.0.237:2181/solr";
        // CloudSolrServer server = new CloudSolrServer(zkHost);
        // server.setDefaultCollection("weibo");

        // ordinary (whole-document) indexing looks like this:
        // SolrInputDocument doc1 = new SolrInputDocument();
        // doc1.addField("id", "1");
        // doc1.addField("title", "云南xxx科技");
        // doc1.addField("cat", "企业信息门户,元数据,数字沙盘,知识管理");
        // (more example documents appear in the full TestSolr.java listing below)

        // partial (atomic) update: send only the unique key plus the fields
        // to change; the new value is a Map whose key is the atomic-update
        // operation ("set" replaces the stored value)
        SolrInputDocument doc1 = new SolrInputDocument();
        doc1.addField("ID", "50dc4fa4c9dce9e193e87170");

        Map<String, String> operation = new HashMap<String, String>();
        operation.put("set", "--------------------------");

        // doc1.addField("TITLE", "云南xxx科技");
        doc1.addField("CONTENT", operation);
        // doc1.addField("ANALYKEYWORDLIST", "企业信息门户 云南 元数据 数字沙盘 知识管理");

        List<SolrInputDocument> docs = new ArrayList<SolrInputDocument>();
        docs.add(doc1);

        // an UpdateRequest can be used instead of server.add():
        // UpdateRequest req = new UpdateRequest();
        // req.add(doc1);
        // req.process(server);

        server.add(docs);
        server.commit();   // the update is not visible until it is committed
    } catch (SolrServerException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
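
Besides "set", Solr's atomic updates also accept "add" (append a value to a multi-valued field) and "inc" (increment a numeric field). The sketch below, which is not part of the original post, shows how those operations look with the same Map-based SolrJ syntax; the VIEWCOUNT and TAGS field names are hypothetical and only for illustration.

// Minimal sketch of the other atomic-update operations, assuming the same core as above.
// VIEWCOUNT (numeric) and TAGS (multi-valued) are hypothetical field names.
public static void atomicAddAndInc() {
    try {
        SolrServer server = new HttpSolrServer("http://192.168.0.237:8983/solr/weibo");

        SolrInputDocument doc = new SolrInputDocument();
        doc.addField("ID", "50dc4fa4c9dce9e193e87170"); // unique key of the document to patch

        Map<String, Object> inc = new HashMap<String, Object>();
        inc.put("inc", 1);                 // increment a numeric field by 1
        doc.addField("VIEWCOUNT", inc);

        Map<String, Object> add = new HashMap<String, Object>();
        add.put("add", "知识管理");        // append one value to a multi-valued field
        doc.addField("TAGS", add);

        server.add(doc);
        server.commit();                   // not visible until committed
    } catch (Exception e) {
        e.printStackTrace();
    }
}

Keep in mind that atomic updates work by having Solr rebuild the rest of the document from its stored fields, so the fields involved need to be stored and <updateLog/> must be enabled in solrconfig.xml.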

 
Additional code: TestSolr.java
package com.sekk.kk.util.search.solr.test;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.WildcardQuery;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrQuery.ORDER;
import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.CloudSolrServer;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.client.solrj.response.UpdateResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.MapSolrParams;
import org.apache.solr.common.params.SolrParams;

import com.sekk.kk.util.search.solr.SolrManager;
import com.linktong.util.format.DateFormat;
import com.linktong.util.validate.Validate;

/**
 */
public class TestSolr {
    public static void main(String[] args) {
        // getSolrServer();
        update();
        // query();
        // delete();
        // multiQuery();
        // conditionQuery();
        // deleteIndex4bbsUrl();
        // multiThreadDeleteIndex4bbsUrl();
    }

    public static void update() {
        try {
            String url = "http://192.168.0.237:8983/solr/weibo";
            SolrServer server = new HttpSolrServer(url);

            // String zkHost = "192.168.0.237:2181/solr";
            // CloudSolrServer server = new CloudSolrServer(zkHost);
            // server.setDefaultCollection("weibo");

            // SolrInputDocument doc1 = new SolrInputDocument();
            // doc1.addField("id", "1");
            // doc1.addField("title", "云南xxx科技");
            // doc1.addField("cat", "企业信息门户,元数据,数字沙盘,知识管理");
            //
            // SolrInputDocument doc2 = new SolrInputDocument();
            // doc2.addField("id", "2");
            // doc2.addField("title", "胡启稳");
            // doc2.addField("cat", "知识管理,企业信息门户,云南,昆明");
            //
            // SolrInputDocument doc3 = new SolrInputDocument();
            // doc3.addField("id", "3");
            // doc3.addField("title", "liferay");
            // doc3.addField("test_s", "这个内容能添加进去么?这是动态字段呀");

            SolrInputDocument doc1 = new SolrInputDocument();
            doc1.addField("ID".toUpperCase(), "111");
            // doc1.addField("title".toUpperCase(), "云南xxx科技");
            doc1.addField("CONTENT".toUpperCase(), "企业信息门户 云南 元数据 数字沙盘 知识管理");
            doc1.addField("ANALYKEYWORDLIST".toUpperCase(), "企业信息门户 云南 元数据 数字沙盘 知识管理");

            // partial (atomic) update
            SolrInputDocument doc2 = new SolrInputDocument();
            doc2.addField("ID".toUpperCase(), "50fdd2d7c9dc111541755740");
            // doc1.addField("title".toUpperCase(), "云南xxx科技");
            Map<String, String> operationMap = new HashMap<String, String>();
            operationMap.put("set", "adasdasdsad");
            doc2.addField("CONTENT".toUpperCase(), operationMap);
            // doc2.addField("ANALYKEYWORDLIST".toUpperCase(), "企业信息门户 云南 元数据 数字沙盘 知识管理");

            // SolrInputDocument doc2 = new SolrInputDocument();
            // doc2.addField("id".toUpperCase(), "2");
            // doc2.addField("title".toUpperCase(), "胡启稳");
            // doc2.addField("content".toUpperCase(), "知识管理 企业信息门户 云南 昆明");
            //
            // SolrInputDocument doc3 = new SolrInputDocument();
            // doc3.addField("id".toUpperCase(), "3");
            // doc3.addField("title".toUpperCase(), "liferay");
            // doc3.addField("content".toUpperCase(),
            // "这个内容能添加进去么 云南 这是动态字段呀");

            List<SolrInputDocument> docs = new ArrayList<SolrInputDocument>();
            // docs.add(doc1);
            docs.add(doc2);
            // docs.add(doc3);

            UpdateResponse updateResponse = server.add(docs);
            System.out.println("updateResponse=" + updateResponse);
            server.commit();
            server.shutdown();
        } catch (SolrServerException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void query() {
        String url = "http://192.168.0.237:8983/solr";
        SolrServer server = new HttpSolrServer(url);

        SolrQuery query = new SolrQuery("云南");
        Map<String, String> map = new HashMap<String, String>();
        // map.put(FacetParams.FACET_DATE, "manufacturedate_dt");
        // map.put(FacetParams.FACET_DATE_START, "2004-01-01T00:00:00Z");
        // map.put(FacetParams.FACET_DATE_END, "2010-01-01T00:00:00Z");
        // map.put(FacetParams.FACET_DATE_GAP, "+1YEAR");
        // map.put("indent", "on");
        map.put("wt", "xml");
        // map.put("hl.fl", "name");
        SolrParams params = new MapSolrParams(map);
        query.add(params);
        query.setHighlight(true);
        try {
            QueryResponse response = server.query(query);
            SolrDocumentList docs = response.getResults();

            System.out.println("文档个数:" + response);
            System.out.println("文档个数:" + docs.getNumFound());
            System.out.println("查询时间:" + response.getQTime());
            System.out.println(docs);
            for (SolrDocument doc : docs) {
                System.out.println(doc);
                System.out.println("id: " + doc.getFieldValue("id"));
                System.out.println("title: " + doc.getFieldValue("title"));
                System.out.println("cat: " + doc.getFieldValue("cat"));
                System.out.println("test_s: " + doc.getFieldValue("test_s"));
                System.out.println();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public static void conditionQuery() {
        // test lucene expression
        BooleanQuery bQuery = new BooleanQuery();

        // test range expression
        // // +LPUBLISHTIME1:{1 TO 3}
        // NumericRangeQuery numRangeQuery1 =
        // NumericRangeQuery.newIntRange("lpublishtime1".toUpperCase(), 1, 3,
        // false, false);
        // bQuery.add(numRangeQuery1, BooleanClause.Occur.MUST);
        // System.out.println(bQuery);
        //
        // // +LPUBLISHTIME2:[1 TO 3}
        // NumericRangeQuery numRangeQuery2 =
        // NumericRangeQuery.newIntRange("lpublishtime2".toUpperCase(), 1, 3,
        // true, false);
        // bQuery.add(numRangeQuery2, BooleanClause.Occur.MUST);
        // System.out.println(bQuery);
        //
        // // +LPUBLISHTIME2:[1 TO 3]
        // NumericRangeQuery numRangeQuery3 =
        // NumericRangeQuery.newIntRange("lpublishtime2".toUpperCase(), 1, 3,
        // true, true);
        // bQuery.add(numRangeQuery3, BooleanClause.Occur.MUST);
        // System.out.println(bQuery);

        String url = "http://61.152.33.19:8983/solr/weibo";
        // String url = "http://192.168.0.237:8983/solr/weibo";
        // SolrServer server = new HttpSolrServer(url);
        HttpSolrServer server = new HttpSolrServer(url);

        StringBuilder q = new StringBuilder();
        // "股市 OR( 股票 AND 股市) OR 股市"
        // q.append(" +");
        // q.append("股市 OR( 股票 AND 股市) OR 股市");
        // q.append("ANALYKEYWORDLIST:云南");
        q.append("*:*");
        // q.append(" +");
        // q.append("URL:-http*");
        // q.append(" +");
        // q.append("ACCOUNT:dsfewfwefpink");
        // q.append(" +");
        // q.append("LPUBLISHTIME:[1356577413000 TO 1356577413001}");
        // q.append("LPUBLISHTIME:[1356577413000 TO 1356577413000]");

        SolrQuery query = new SolrQuery();
        // Map<String, String> map = new HashMap<String, String>();
        // map.put("wt", "xml");
        // query.set("wt", "xml");
        // query.add("fq", "-URL:http://*"); // "云南"
        // map.put("hl.fl", "name");
        // SolrParams params = new MapSolrParams(map);
        // query.add(params);
        // query.setHighlight(true);

        int start = 0;
        int rows = 10;
        query.setStart(start);
        query.setRows(rows);
        query.setQuery(q.toString());
        // Date parseDate = null;
        // try {
        // parseDate = DateFormat.parseDate("2013-01-15 00:00:00");
        // } catch (Exception e1) {
        // e1.printStackTrace();
        // }
        // query.set("fq", "lpublishtime".toUpperCase() + ":[" +
        // parseDate.getTime() + " TO *]");
        // query.addSortField("lpublishtime".toUpperCase(), ORDER.desc);
        try {
            // query.setHighlight(true)
            // // opening tag
            // .addHighlightField("CONTENT") // highlighted field
            // .setHighlightSimplePre("<span class='highlight'>").setHighlightSimplePost("</span>")
            // // closing tag
            // .setStart(0).setRows(10); // number of rows
            //
            // // which fields to highlight
            // query.setParam("hl.fl", "CONTENT");

            QueryResponse response = server.query(query);
            SolrDocumentList docs = response.getResults();
            // SolrDocumentList list = response.getResults();
            //
            // System.out.println("高亮显示:");
            // for (SolrDocument sd : list) {
            // String id = (String) sd.getFieldValue("ID");
            // if (response.getHighlighting().get(id) != null) {
            // System.out.println(response.getHighlighting().get(id).get("CONTENT"));
            // }
            // }

            System.out.println("文档个数:" + response);
            System.out.println("文档个数:" + docs.getNumFound());
            System.out.println("查询时间:" + response.getQTime());
            System.out.println(docs);
            for (SolrDocument doc : docs) {
                System.out.println(doc);
                System.out.println("id: " + doc.getFieldValue("ID"));
                System.out.println("title: " + doc.getFieldValue("title"));
                System.out.println("CONTENT: " + doc.getFieldValue("CONTENT"));
                System.out.println("test_s: " + doc.getFieldValue("test_s"));
                System.out.println();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        server.shutdown();
    }

    public static void multiQuery() {
        try {
            // String url = "http://localhost:80/solr";
            // SolrServer server = new HttpSolrServer(url);

            String zkHost = "192.168.0.237:2181/solr";
            SolrServer server = new CloudSolrServer(zkHost);
            // String url = "http://localhost:80/solr";

            SolrQuery query = new SolrQuery("云南");
            // SolrQuery query = new SolrQuery();
            // Map<String, String> map = new HashMap<String, String>();
            // // map.put(FacetParams.FACET_DATE, "manufacturedate_dt");
            // // map.put(FacetParams.FACET_DATE_START, "2004-01-01T00:00:00Z");
            // // map.put(FacetParams.FACET_DATE_END, "2010-01-01T00:00:00Z");
            // // map.put(FacetParams.FACET_DATE_GAP, "+1YEAR");
            // // map.put("indent", "on");
            // map.put("wt", "xml");
            // // map.put("hl.fl", "name");
            // SolrParams params = new MapSolrParams(map);
            // query.add(params);
            // query.setHighlight(true);

            String shards = "localhost:80/solr,localhost:80/solr/weibo";
            Map<String, String> map = new HashMap<String, String>();
            map.put("q", "*:*");
            map.put("collection", "weibo");
            // map.put("shards", shards);
            // SolrParams params = new MapSolrParams(map);
            // query.add(params);
            SolrParams solrParams = new MapSolrParams(map);
            // ModifiableSolrParams solrParams = new ModifiableSolrParams();
            // solrParams.set("q", "*:*");
            // solrParams.set("shards", shards);

            // String shards = "localhost:8983/solr,localhost:7574/solr";
            // StringBuffer request = new StringBuffer();
            // request.append("&q=" + query);
            // request.append("&shards=" + shards);
            // SolrParams solrParams =
            // SolrRequestParsers.parseQueryString(request.toString());

            QueryResponse response = server.query(solrParams);
            // QueryResponse response = server.query(query);
            SolrDocumentList docs = response.getResults();

            System.out.println("文档个数:" + response);
            System.out.println("文档个数:" + docs.getNumFound());
            System.out.println("查询时间:" + response.getQTime());
            System.out.println(docs);
            for (SolrDocument doc : docs) {
                System.out.println(doc);
                System.out.println("id: " + doc.getFieldValue("id"));
                System.out.println("title: " + doc.getFieldValue("title"));
                System.out.println("content: " + doc.getFieldValue("content"));
                System.out.println("test_s: " + doc.getFieldValue("test_s"));
                System.out.println();
            }
            // release the resource
            server.shutdown();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public static void delete() {
        try {
            // if no core is specified, Solr falls back to the default collection1
            String url = "http://61.152.33.19:8983/solr/bbs";
            // String url = "http://192.168.0.237:8983/solr/weibo";
            // // String url = "http://localhost:80/solr/weibo";
            // SolrServer server = new HttpSolrServer(url);

            // String zkHost = "192.168.0.237:2181/solr";
            // CloudSolrServer server = new CloudSolrServer(zkHost);
            // server.setDefaultCollection("weibo");
            HttpSolrServer server = new HttpSolrServer(url);

            Map<String, String> map = new HashMap<String, String>();
            map.put("q", "*:*");
            map.put("collection", "weibo");
            SolrParams solrParams = new MapSolrParams(map);

            // UpdateResponse updateResponse = server.deleteByQuery(solrParams);
            // UpdateResponse updateResponse = server.deleteByQuery("*:*");
            // System.out.println(updateResponse);
            // the delete does not take effect until committed
            // updateResponse = server.commit();
            // System.out.println(updateResponse);
            server.shutdown();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    static final String solrUrl89 = "http://61.152.33.19:8983/solr";
    static final String solrUrl35 = "http://61.159.33.33:8983/solr";
    static final String solrUrl34 = "http://61.122.116.44:8983/solr";

    public static void multiThreadDeleteIndex4bbsUrl() {
        final String url1 = solrUrl35 + "/bbs";
        final String url2 = solrUrl34 + "/bbs";
        final String url3 = solrUrl89 + "/bbs";

        String keyword = "*:*";
        // String keyword = "我要检测 OR 我要 OR 我要检测管理 OR 您点击 OR 要检测";
        new DeleteSolrIndexTool4BBSUrl(url1, keyword).start();
        // new DeleteSolrIndexTool4BBSUrl(url2, keyword).start();
        // new DeleteSolrIndexTool4BBSUrl(url3, keyword).start();
    }

    public static void deleteIndex4bbsUrl() {
        int dataCount = 0, deleteCount = 0, deleteSuccCount = 0;
        int start = 0;
        int rows = 100;

        // if no core is specified, Solr falls back to the default collection1
        String url = "http://61.152.33.19:8983/solr/bbs";
        // String url = "http://192.168.0.237:8983/solr/weibo";
        // // String url = "http://localhost:80/solr/weibo";
        // SolrServer server = new HttpSolrServer(url);

        // String zkHost = "192.168.0.237:2181/solr";
        // CloudSolrServer server = new CloudSolrServer(zkHost);
        // server.setDefaultCollection("weibo");
        HttpSolrServer server = new HttpSolrServer(url);

        boolean isDelete = true;
        while (isDelete) {
            try {
                // Map<String, String> map = new HashMap<String, String>();
                // map.put("q", "*:*");
                // map.put("collection", "weibo");
                SolrQuery query = new SolrQuery();
                query.setStart(start);
                query.setRows(rows);
                query.add("fl", "ID,LPUBLISHTIME,SITE,URL");
                // query.setQuery("*:*");
                query.setQuery("我要检测 OR 我要 OR 我要检测管理 OR 您点击 OR 要检测");

                System.out.println("开始查询..." + query);
                QueryResponse response = server.query(query);
                SolrDocumentList docs = response.getResults();
                if (Validate.isEmpty(docs)) {
                    System.out.println("查询为空! " + query);
                    break;
                }
                System.out.println("文档个数:" + docs.getNumFound() + ",查询时间:" + response.getQTime());
                // System.out.println(docs);
                for (SolrDocument doc : docs) {
                    dataCount++;
                    // System.out.println(doc);
                    System.out.println("id: " + doc.getFieldValue("id".toUpperCase()));

                    String id = (String) doc.getFieldValue("id".toUpperCase());
                    String dataUrl = (String) doc.getFieldValue("url".toUpperCase());
                    if (dataUrl.indexOf("http://") == -1) {
                        deleteCount++;
                        System.out.println("delete " + id + " start...");
                        try {
                            UpdateResponse updateResponse = server.deleteByQuery("ID:" + id);
                            // the delete does not take effect until committed
                            updateResponse = server.commit();
                            deleteSuccCount++;
                            System.out.println("delete " + updateResponse);
                        } catch (Exception e) {
                            System.out.println("delete " + id + " error");
                            e.printStackTrace();
                        }
                        System.out.println("delete " + id + " end");
                    }
                    // System.out.println("title: " + doc.getFieldValue("title"));
                    // System.out.println("content: " + doc.getFieldValue("content"));
                    // System.out.println("test_s: " + doc.getFieldValue("test_s"));
                    // System.out.println();
                }
                // SolrParams solrParams = new MapSolrParams(map);
                // UpdateResponse updateResponse = server.deleteByQuery(solrParams);
                // UpdateResponse updateResponse = server.deleteByQuery("*:*");
                // System.out.println(updateResponse);
                // // the delete does not take effect until committed
                // updateResponse = server.commit();
                // System.out.println(updateResponse);
            } catch (Exception e) {
                System.out.println("deleteIndex4bbsUrl handle error:" + e.getMessage());
                e.printStackTrace();
            }
            start += rows;
            System.out.println("start=" + start + ",rows=" + rows + ",dataCount=" + dataCount + ",deleteCount=" + deleteCount + ",deleteSuccCount=" + deleteSuccCount);
        }
        System.out.println("运行完毕~!");
        System.out.println("start=" + start + ",rows=" + rows + ",dataCount=" + dataCount + ",deleteCount=" + deleteCount + ",deleteSuccCount=" + deleteSuccCount);
        try {
            server.shutdown();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public static void getSolrServer() {
        SolrManager solrManager = new SolrManager();
        // String coreName = "collection1";
        String coreName = "weibo";
        try {
            SolrServer solrServer = solrManager.getSolrServer(coreName);
            System.out.println(solrServer);
            // Use solrServer operation
        } catch (SolrServerException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}

class DeleteSolrIndexTool4BBSUrl extends Thread {

    private String url;
    private String logName;
    private String keyword;

    public DeleteSolrIndexTool4BBSUrl(String url, String keyword) {
        super();
        this.url = url;
        this.keyword = keyword;
    }

    public void run() {
        this.logName = Thread.currentThread().getName();

        int dataCount = 0, deleteCount = 0, deleteSuccCount = 0;
        int start = 0;
        int rows = 100;

        // if no core is specified, Solr falls back to the default collection1
        HttpSolrServer server = new HttpSolrServer(url);

        boolean isDelete = true;
        while (isDelete) {
            try {
                SolrQuery query = new SolrQuery();
                query.setStart(start);
                query.setRows(rows);
                query.add("fl", "ID,LPUBLISHTIME,SITE,URL");
                // query.setQuery("*:*");
                // query.setQuery("我要检测 OR 我要 OR 我要检测管理 OR 您点击 OR 要检测");
                query.setQuery(keyword);

                System.out.println(logName + " " + "开始查询..." + query);
                QueryResponse response = server.query(query);
                SolrDocumentList docs = response.getResults();
                if (Validate.isEmpty(docs)) {
                    System.out.println(logName + " " + "查询为空! " + query);
                    break;
                }
                System.out.println(logName + " " + "文档个数:" + docs.getNumFound() + ",查询时间:" + response.getQTime());
                // System.out.println(docs);
                for (SolrDocument doc : docs) {
                    dataCount++;
                    // System.out.println(doc);
                    System.out.println(logName + " " + "id: " + doc.getFieldValue("id".toUpperCase()));

                    String id = (String) doc.getFieldValue("id".toUpperCase());
                    String dataUrl = (String) doc.getFieldValue("url".toUpperCase());
                    if (dataUrl.indexOf("http://") == -1) {
                        deleteCount++;
                        System.out.println(logName + " " + "delete " + id + " start...");
                        try {
                            UpdateResponse updateResponse = server.deleteByQuery("ID:" + id);
                            // the delete does not take effect until committed
                            updateResponse = server.commit();
                            deleteSuccCount++;
                            System.out.println(logName + " " + "delete " + updateResponse);
                        } catch (Exception e) {
                            System.out.println(logName + " " + "delete " + id + " error");
                            e.printStackTrace();
                        }
                        System.out.println(logName + " " + "delete " + id + " end");
                    }
                }
            } catch (Exception e) {
                System.out.println(logName + " " + "deleteIndex4bbsUrl handle error:" + e.getMessage());
                e.printStackTrace();
            }
            start += rows;
            System.out.println(logName + " " + "start=" + start + ",rows=" + rows + ",dataCount=" + dataCount + ",deleteCount=" + deleteCount + ",deleteSuccCount=" + deleteSuccCount);
        }
        System.out.println(logName + " " + "运行完毕~!");
        System.out.println(logName + " " + "start=" + start + ",rows=" + rows + ",dataCount=" + dataCount + ",deleteCount=" + deleteCount + ",deleteSuccCount=" + deleteSuccCount);
        try {
            server.shutdown();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
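
In the delete helpers above, every matching document is removed with deleteByQuery("ID:" + id) followed by an immediate commit. SolrJ also provides deleteById, and committing once per page of results instead of once per document reduces commit overhead. The sketch below is not from the original code; it assumes the same HttpSolrServer, ID and URL fields, and paging loop as above, and simply shows what a batched variant of the inner loop could look like.

// Sketch of a batched alternative: collect the IDs of documents whose URL
// field does not contain "http://", delete them in one call, commit once.
static void deletePageByIds(HttpSolrServer server, SolrDocumentList docs) throws Exception {
    List<String> idsToDelete = new ArrayList<String>();
    for (SolrDocument doc : docs) {
        String id = (String) doc.getFieldValue("ID");
        String dataUrl = (String) doc.getFieldValue("URL");
        if (dataUrl != null && !dataUrl.contains("http://")) {
            idsToDelete.add(id);
        }
    }
    if (!idsToDelete.isEmpty()) {
        server.deleteById(idsToDelete); // delete by unique key, no query parsing needed
        server.commit();                // one commit per page instead of per document
    }
}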
 
