HubbleDotNet usage class
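This post collects a data-access class that wraps HubbleDotNet full-text search for several site search pages: knowledge-base search (KnowSearch), Chinese and English document search (DocumentSearch / DocumentSearch_EN), software search (SoftwareSearch), and Chinese and English site-wide search (WholeSearch_CN / WholeSearch_EN). Every method follows the same pattern: open a HubbleAsyncConnection, let the server turn the keyword into a match string with GetKeywordAnalyzerStringFromServer, run a paged "select between @begin to @end ..." query with Contains/match predicates, read the total hit count from the first result table, and highlight titles and abstracts with the Pan Gu highlighter.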
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Hubble.SQLClient;
using System.Configuration;
using RJ.Entity.UI_A;
using System.Data;
using Hubble.Core.Analysis.HighLight;
using Hubble.Analyzer;
using RJ.DBConnection;
using RJ.DALS.UI_A;
using RJ.Common;
using RJ.Entity.UI_A.AboutRJ;
namespace RJ.DAL.UI_A
{
public class DHubble : UI_A_DALBaseClass
{
private static string _TitleAnalyzerName = null;
private static string _ContentAnalyzerName = null;
private const int CacheTimeout = 0; //In seconds
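// Reads the field list of the given table via "exec SP_Columns" and caches the analyzer
// names configured for the Title and Content fields in the static fields above, so the
// lookup only happens once per process.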
public static void GetAnalyzerName(HubbleAsyncConnection conn, string tableName)
{
if (_TitleAnalyzerName != null && _ContentAnalyzerName != null)
{
return;
}
string sql = string.Format("exec SP_Columns '{0}'", tableName.Replace("'", "''"));
HubbleCommand cmd = new HubbleCommand(sql, conn);
foreach (System.Data.DataRow row in cmd.Query().Tables[0].Rows)
{
if (row["FieldName"].ToString().Equals("Title", StringComparison.CurrentCultureIgnoreCase))
{
_TitleAnalyzerName = row["Analyzer"].ToString();
}
if (row["FieldName"].ToString().Equals("Content", StringComparison.CurrentCultureIgnoreCase))
{
_ContentAnalyzerName = row["Analyzer"].ToString();
}
}
}
/// <summary>
/// Knowledge-base search list (KnowSearch table)
/// </summary>
/// <param name="pageindex">Page index (1-based)</param>
/// <param name="pagesize">Page size</param>
/// <param name="key">Search keyword (the part before '_' is used as the match string)</param>
/// <param name="Count">Total number of matching records</param>
/// <returns>One page of matching articles</returns>
public List<EArticle> SearchKnowList(int pageindex, int pagesize, string key, out int Count)
{
string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
DataSet ds = new DataSet();
System.Diagnostics.Stopwatch sw = new System.Diagnostics.Stopwatch();
sw.Start();
using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
{
conn.Open();
GetAnalyzerName(conn, "KnowSearch");
string wordssplitbyspace;
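// Ask the Hubble server to segment the keyword with the analyzer configured for the
// "documentname" field of KnowSearch; the returned match string feeds the Contains/match
// predicates below, and wordssplitbyspace receives the space-separated terms.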
HubbleCommand matchCmd = new HubbleCommand(conn);
string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("KnowSearch",
"documentname", (key.Split('_')[0]).ToString(), int.MaxValue, out wordssplitbyspace);
HubbleDataAdapter adapter = new HubbleDataAdapter();
//if (key.Split('_')[1] == "1")
//{
// adapter.SelectCommand = new HubbleCommand("select between @begin to @end * from KnowSearch where ( documentname Contains @matchString or documentname match @matchString ) order by score desc",
// conn);
//}
//else if (key.Split('_')[1] == "2")
//{
// adapter.SelectCommand = new HubbleCommand("select between @begin to @end ID,UNIID,DocumentName,publishTime from KnowSearch where ( documentname Contains @matchString or documentname match @matchString ) order by publishTime desc",
// conn);
//}
//else
//{
adapter.SelectCommand = new HubbleCommand("select between @begin to @end ID,UNIID,DocumentName,publishTime from KnowSearch where ( documentname Contains @matchString or documentname match @matchString ) order by score desc,publishTime desc",
conn);
//}
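// "select between @begin to @end ..." is Hubble's paging syntax: @begin/@end are the
// zero-based row positions of the requested page. Contains and match are Hubble's
// full-text predicates; the query ORs them to broaden recall.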
adapter.SelectCommand.Parameters.Add("@begin", (pagesize - 1) * pageindex);
adapter.SelectCommand.Parameters.Add("@end", pagesize * pageindex - 1);
adapter.SelectCommand.Parameters.Add("@matchString", matchString);
adapter.SelectCommand.CacheTimeout = CacheTimeout;
ds = new System.Data.DataSet();
//adapter.Fill(ds);
HubbleCommand cmd = adapter.SelectCommand;
ds = cmd.Query(CacheTimeout);
//titleWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Title", docids, int.MaxValue);
//contentWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Content", docids, int.MaxValue);
}
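// HubbleDotNet reports the total number of matching records through
// DataTable.MinimumCapacity on the first result table.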
Count = ds.Tables[0].MinimumCapacity;
List<EArticle> result = new List<EArticle>();
foreach (System.Data.DataRow row in ds.Tables[0].Rows)
{
EArticle document = new EArticle();
document.ID = int.Parse(row["ID"].ToString());
document.UNIID = row["UNIID"].ToString();
document.Name = row["DocumentName"].ToString();
if (!string.IsNullOrEmpty(row["publishTime"].ToString().Trim()))
{
document.PublishTime = Convert.ToDateTime(row["publishTime"].ToString());
}
else
{
document.PublishTime = Convert.ToDateTime("9999-12-31");
}
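// Highlight the keyword in the title with the Pan Gu analyzer, wrapping hits in <font color="red">;
// if no fragment is produced, the raw title is used as-is.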
SimpleHTMLFormatter simpleHTMLFormatter =
new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
Highlighter titleHighlighter;
Highlighter contentHighlighter;
titleHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
contentHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
titleHighlighter.FragmentSize = 100;
contentHighlighter.FragmentSize = 100;
string lightname = titleHighlighter.GetBestFragment((key.Split('_')[0]).ToString(), document.Name);
if (string.IsNullOrEmpty(lightname))
{
document.TitleHighLighter = document.Name;
}
else
{
document.TitleHighLighter = lightname;
}
result.Add(document);
}
sw.Stop();
return result;
}
/// <summary>
/// Document search list (DocumentSearch table)
/// </summary>
/// <param name="pageindex">Page index (1-based)</param>
/// <param name="pagesize">Page size</param>
/// <param name="key">Search keyword</param>
/// <param name="isLevel">Comma-separated visit levels the current user is allowed to see; empty returns only public (VisitLevel='') records</param>
/// <param name="Count">Total number of matching records</param>
/// <returns>One page of matching documents</returns>
public List<EArticle> SearchDocList(int pageindex, int pagesize, string key, string isLevel, out int Count)
{
string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
DataSet ds = new DataSet();
//string sql = "select * from News where Title match '" + key + "'";
//ds = SQLHelper.ExecuteDataset(strConn, CommandType.Text, sql, null);
System.Diagnostics.Stopwatch sw = new System.Diagnostics.Stopwatch();
sw.Start();
using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
{
conn.Open();
GetAnalyzerName(conn, "DocumentSearch");
string wordssplitbyspace;
HubbleCommand matchCmd = new HubbleCommand(conn);
string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("DocumentSearch",
"documentname", key, int.MaxValue, out wordssplitbyspace);
HubbleDataAdapter adapter = new HubbleDataAdapter();
StringBuilder strSql = new StringBuilder();
string[] levelPara;
if (string.IsNullOrEmpty(isLevel))
{
strSql.Append("select between @begin to @end id,UNIID,documentname,htmlurl,publishtime,addtime,SEO_D,PDF from DocumentSearch where ( documentname Contains @matchString or Content Contains @matchString or documentname match @matchString or Content match @matchString ) and VisitLevel ='' and release='true' order by score desc");
}
else
{
levelPara = isLevel.Split(',');
strSql.Append("select between @begin to @end id,UNIID,documentname,htmlurl,publishtime,addtime,SEO_D,PDF from DocumentSearch where ( documentname Contains @matchString or Content Contains @matchString or documentname match @matchString or Content match @matchString ) and (");
for (int i = 0; i < levelPara.Length; i++)
{
strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
}
strSql.Remove(strSql.Length - 3, 2);
strSql.Append(" or VisitLevel='') and release='true' order by score desc");
}
adapter.SelectCommand = new HubbleCommand(strSql.ToString(), conn);
adapter.SelectCommand.Parameters.Add("@begin", (pagesize - 1) * pageindex);
adapter.SelectCommand.Parameters.Add("@end", pagesize * pageindex - 1);
adapter.SelectCommand.Parameters.Add("@matchString", matchString);
if (!string.IsNullOrEmpty(isLevel))
{
for (int i = 0; i < isLevel.Split(',').Length; i++)
{
adapter.SelectCommand.Parameters.Add("@VisitLevel" + i, isLevel.Split(',')[i].Replace("'", ""));
}
}
adapter.SelectCommand.CacheTimeout = CacheTimeout;
ds = new System.Data.DataSet();
//adapter.Fill(ds);
HubbleCommand cmd = adapter.SelectCommand;
ds = cmd.Query(CacheTimeout);
//titleWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Title", docids, int.MaxValue);
//contentWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Content", docids, int.MaxValue);
}
Count = ds.Tables[0].MinimumCapacity;
List<EArticle> result = new List<EArticle>();
foreach (System.Data.DataRow row in ds.Tables[0].Rows)
{
EArticle document = new EArticle();
document.ID = int.Parse(row["ID"].ToString());
document.UNIID = row["UNIID"].ToString();
document.Name = row["DocumentName"].ToString();
document.htmlURL = row["htmlURL"].ToString();
document.SEO_D = row["SEO_D"].ToString();
document.PDF = row["PDF"].ToString();
if (!string.IsNullOrEmpty(row["publishTime"].ToString().Trim()))
{
document.PublishTime = Convert.ToDateTime(row["publishTime"].ToString());
}
else
{
document.PublishTime = Convert.ToDateTime(row["AddTime"].ToString());
}
SimpleHTMLFormatter simpleHTMLFormatter =
new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
Highlighter titleHighlighter;
Highlighter contentHighlighter;
titleHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
contentHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
titleHighlighter.FragmentSize = 100;
contentHighlighter.FragmentSize = 100;
string lightname = titleHighlighter.GetBestFragment(key, document.Name);
if (string.IsNullOrEmpty(lightname))
{
document.TitleHighLighter = document.Name;
}
else
{
document.TitleHighLighter = lightname;
}
result.Add(document);
}
sw.Stop();
return result;
}
/// <summary>
/// Software search list (SoftwareSearch table)
/// </summary>
/// <param name="pageindex">Page index (1-based)</param>
/// <param name="pagesize">Page size</param>
/// <param name="key">Search keyword</param>
/// <param name="isLevel">Comma-separated visit levels the current user is allowed to see</param>
/// <param name="Count">Total number of matching records</param>
/// <returns>One page of matching software records</returns>
public List<EArticle> SearchSoftWareList(int pageindex, int pagesize, string key, string isLevel, out int Count)
{
string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
DataSet ds = new DataSet();
//string sql = "select * from News where Title match '" + key + "'";
//ds = SQLHelper.ExecuteDataset(strConn, CommandType.Text, sql, null);
System.Diagnostics.Stopwatch sw = new System.Diagnostics.Stopwatch();
sw.Start();
using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
{
conn.Open();
GetAnalyzerName(conn, "SoftwareSearch");
string wordssplitbyspace;
HubbleCommand matchCmd = new HubbleCommand(conn);
string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("SoftwareSearch",
"DocumentName", key, int.MaxValue, out wordssplitbyspace);
HubbleDataAdapter adapter = new HubbleDataAdapter();
StringBuilder strSql = new StringBuilder();
string[] levelPara;
if (string.IsNullOrEmpty(isLevel))
{
strSql.Append("select between @begin to @end id,documentname,publishtime,addtime,UNIID,IsCheckPartner from SoftwareSearch where (DocumentName Contains @matchString or DocumentName match @matchString) and (VisitLevel ='' or visitlevel='JingBiaoLevel') and release='true' order by score desc");
}
else
{
levelPara = isLevel.Split(',');
strSql.Append("select between @begin to @end id,documentname,publishtime,addtime,UNIID,IsCheckPartner from SoftwareSearch where (DocumentName Contains @matchString or DocumentName match @matchString) and (");
for (int i = 0; i < levelPara.Length; i++)
{
strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
}
strSql.Remove(strSql.Length - 3, 2);
strSql.Append(" or VisitLevel='' or visitlevel='JingBiaoLevel') and release='true' order by score desc");
}
adapter.SelectCommand = new HubbleCommand(strSql.ToString(), conn);
adapter.SelectCommand.Parameters.Add("@begin", (pagesize - 1) * pageindex);
adapter.SelectCommand.Parameters.Add("@end", pagesize * pageindex - 1);
adapter.SelectCommand.Parameters.Add("@matchString", matchString);
if (!string.IsNullOrEmpty(isLevel))
{
for (int i = 0; i < isLevel.Split(',').Length; i++)
{
adapter.SelectCommand.Parameters.Add("@VisitLevel" + i, isLevel.Split(',')[i].Replace("'", ""));
}
}
adapter.SelectCommand.CacheTimeout = CacheTimeout;
ds = new System.Data.DataSet();
//adapter.Fill(ds);
HubbleCommand cmd = adapter.SelectCommand;
ds = cmd.Query(CacheTimeout);
//titleWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Title", docids, int.MaxValue);
//contentWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Content", docids, int.MaxValue);
}
Count = ds.Tables[0].MinimumCapacity;
List<EArticle> result = new List<EArticle>();
foreach (System.Data.DataRow row in ds.Tables[0].Rows)
{
EArticle software = new EArticle();
software.ID = int.Parse(row["ID"].ToString());
software.Name = row["DocumentName"].ToString();
software.UpdateTime = Convert.ToDateTime(row["AddTime"].ToString());
software.UNIID = row["UNIID"].ToString();
software.IsCheckPartner = Convert.ToBoolean(row["IsCheckPartner"]);
if (!string.IsNullOrEmpty(row["publishTime"].ToString().Trim()))
{
software.PublishTime = Convert.ToDateTime(row["publishTime"].ToString());
}
else
{
software.PublishTime = Convert.ToDateTime(row["Addtime"].ToString());
}
SimpleHTMLFormatter simpleHTMLFormatter =
new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
Highlighter titleHighlighter;
Highlighter contentHighlighter;
titleHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
contentHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
titleHighlighter.FragmentSize = 100;
contentHighlighter.FragmentSize = 100;
software.TitleHighLighter = titleHighlighter.GetBestFragment(key, software.Name);
if (string.IsNullOrEmpty(software.TitleHighLighter))
{
software.TitleHighLighter = software.Name;
}
result.Add(software);
}
sw.Stop();
return result;
}
/// <summary>
/// Site-wide search (WholeSearch_CN table)
/// </summary>
/// <param name="pagesize">Page size; 0 omits the paging clause</param>
/// <param name="pageindex">Page index (1-based)</param>
/// <param name="key">Search keyword</param>
/// <param name="isLevel">Comma-separated visit levels the current user is allowed to see</param>
/// <param name="typeid">Type ID filter; values greater than 0 restrict the search to one type</param>
/// <param name="Count">Total number of matching records</param>
/// <param name="dtcount">Per-TypeID hit counts (second result table of the GroupBy query), or null</param>
/// <returns>One page of search results</returns>
public List<ESearch> SearchAllList(int pagesize, int pageindex, string key, string isLevel, int typeid, out int Count, out DataTable dtcount)
{
string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
DataSet ds = new DataSet();
using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
{
conn.Open();
GetAnalyzerName(conn, "WholeSearch_CN");
string wordssplitbyspace;
HubbleCommand matchCmd = new HubbleCommand(conn);
string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("WholeSearch_CN",
"Name", key, int.MaxValue, out wordssplitbyspace);
HubbleDataAdapter adapter = new HubbleDataAdapter();
StringBuilder strSql = new StringBuilder();
string[] levelPara;
strSql.Append(" select ");
if (pagesize != 0)
{
strSql.Append(" between @begin to @end ID,Name,TypeID,SEO_D,TypeName,strURL,addTime from ");
}
else
{
// When paging is skipped we still need the column list and the FROM keyword (mirrors EN_SearchAllList).
strSql.Append(" ID,Name,TypeID,SEO_D,TypeName,strURL,addTime from ");
}
strSql.Append(" WholeSearch_CN where ( Name Contains @matchString or Name Match @matchString or SEO_D Contains @matchString or SEO_D Match @matchString) ");
if (string.IsNullOrEmpty(isLevel))
{
strSql.Append(" and (VisitLevel ='' or visitlevel='JingBiaoLevel')");
}
else
{
levelPara = isLevel.Split(',');
strSql.Append(" and (");
for (int i = 0; i < levelPara.Length; i++)
{
strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
}
strSql.Remove(strSql.Length - 3, 2);
strSql.Append(" or VisitLevel='' or visitlevel='JingBiaoLevel') ");
}
if (typeid > 0)
{
strSql.Append(" and typeid=@typeid");
}
else
{
strSql.Insert(0, " [GroupBy('Count', 'ID', 'TypeID', 10)] ");
}
strSql.Append(" order by score desc, lorder");
adapter.SelectCommand = new HubbleCommand(strSql.ToString(), conn);
adapter.SelectCommand.Parameters.Add("@begin", (pageindex - 1) * pagesize);
adapter.SelectCommand.Parameters.Add("@end", pageindex * pagesize - 1);
adapter.SelectCommand.Parameters.Add("@matchString", matchString);
adapter.SelectCommand.Parameters.Add("@typeid", typeid);
if (!string.IsNullOrEmpty(isLevel))
{
for (int i = 0; i < isLevel.Split(',').Length; i++)
{
adapter.SelectCommand.Parameters.Add("@VisitLevel" + i, isLevel.Split(',')[i].Replace("'", ""));
}
}
adapter.SelectCommand.CacheTimeout = CacheTimeout;
ds = new System.Data.DataSet();
//adapter.Fill(ds);
HubbleCommand cmd = adapter.SelectCommand;
ds = cmd.Query(CacheTimeout);
//titleWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Title", docids, int.MaxValue);
//contentWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Content", docids, int.MaxValue);
}
Count = ds.Tables[0].MinimumCapacity;
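// When typeid <= 0 the [GroupBy('Count', 'ID', 'TypeID', 10)] clause is prepended to the query,
// and Hubble returns an extra result table with per-TypeID hit counts; expose it as dtcount.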
if (ds.Tables.Count > 1)
{
dtcount = ds.Tables[1];
}
else
{
dtcount = null;
}
List<ESearch> result = new List<ESearch>();
foreach (System.Data.DataRow row in ds.Tables[0].Rows)
{
ESearch search = new ESearch();
search.ID = int.Parse(row["ID"].ToString());
//search.UNIID = new Guid(row["UNIID"].ToString());
search.Name = Strings.NoHTML(row["Name"].ToString());
search.SEO_D = row["SEO_D"].ToString();
search.strURL = row["strURL"].ToString();
if (row["TypeID"].ToString() != "")
{
search.TypeID = int.Parse(row["TypeID"].ToString());
search.TypeName = row["TypeName"].ToString();
}
if (!string.IsNullOrEmpty(row["addTime"].ToString().Trim()))
{
search.addTime = Convert.ToDateTime(row["addTime"].ToString());
}
else
{
search.addTime = Convert.ToDateTime("9999-12-31");
}
SimpleHTMLFormatter simpleHTMLFormatter =
new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
Highlighter titleHighlighter;
Highlighter contentHighlighter;
titleHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
contentHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
titleHighlighter.FragmentSize = 100;
contentHighlighter.FragmentSize = 100;
search.Abstract = contentHighlighter.GetBestFragment(key, search.SEO_D);
if (string.IsNullOrEmpty(search.Abstract))
{
search.Abstract = search.SEO_D;
}
search.TitleHighLighter = titleHighlighter.GetBestFragment(key, search.Name);
if (string.IsNullOrEmpty(search.TitleHighLighter))
{
search.TitleHighLighter = search.Name;
}
result.Add(search);
}
return result;
}
/// <summary>
/// English document search list (DocumentSearch_EN table)
/// </summary>
/// <param name="pageindex">Page index (1-based)</param>
/// <param name="pagesize">Page size</param>
/// <param name="key">Search keyword</param>
/// <param name="isLevel">Comma-separated visit levels the current user is allowed to see</param>
/// <param name="Count">Total number of matching records</param>
/// <returns>One page of matching documents</returns>
public List<EArticle> EN_SearchDocList(int pageindex, int pagesize, string key, string isLevel, out int Count)
{
string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
DataSet ds = new DataSet();
//string sql = "select * from News where Title match '" + key + "'";
//ds = SQLHelper.ExecuteDataset(strConn, CommandType.Text, sql, null);
System.Diagnostics.Stopwatch sw = new System.Diagnostics.Stopwatch();
sw.Start();
using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
{
conn.Open();
GetAnalyzerName(conn, "DocumentSearch_EN");
string wordssplitbyspace;
HubbleCommand matchCmd = new HubbleCommand(conn);
string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("DocumentSearch_EN",
"documentname", key, int.MaxValue, out wordssplitbyspace);
HubbleDataAdapter adapter = new HubbleDataAdapter();
StringBuilder strSql = new StringBuilder();
string[] levelPara;
if (string.IsNullOrEmpty(isLevel))
{
strSql.Append("select between @begin to @end ID,UNIID,DocumentName,PDF,SEO_D,publishTime,addTime from DocumentSearch_EN where ( documentname Contains @matchString or documentname match @matchString ) and VisitLevel ='' and release='true' order by score desc");
}
else
{
levelPara = isLevel.Split(',');
strSql.Append("select between @begin to @end ID,UNIID,DocumentName,PDF,SEO_D,publishTime,addTime from DocumentSearch_EN where ( documentname Contains @matchString or documentname match @matchString or SEO_K Contains @matchString or SEO_K match @matchString ) and (");
for (int i = 0; i < levelPara.Length; i++)
{
strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
}
strSql.Remove(strSql.Length - 3, 2);
strSql.Append(" or VisitLevel='') and release='true' order by score desc");
}
adapter.SelectCommand = new HubbleCommand(strSql.ToString(), conn);
adapter.SelectCommand.Parameters.Add("@begin", (pagesize - 1) * pageindex);
adapter.SelectCommand.Parameters.Add("@end", pagesize * pageindex - 1);
adapter.SelectCommand.Parameters.Add("@matchString", matchString);
if (!string.IsNullOrEmpty(isLevel))
{
for (int i = 0; i < isLevel.Split(',').Length; i++)
{
adapter.SelectCommand.Parameters.Add("@VisitLevel" + i, isLevel.Split(',')[i].Replace("'", ""));
}
}
adapter.SelectCommand.CacheTimeout = CacheTimeout;
ds = new System.Data.DataSet();
//adapter.Fill(ds);
HubbleCommand cmd = adapter.SelectCommand;
ds = cmd.Query(CacheTimeout);
//titleWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Title", docids, int.MaxValue);
//contentWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Content", docids, int.MaxValue);
}
Count = ds.Tables[0].MinimumCapacity;
List<EArticle> result = new List<EArticle>();
foreach (System.Data.DataRow row in ds.Tables[0].Rows)
{
EArticle document = new EArticle();
document.ID = int.Parse(row["ID"].ToString());
document.UNIID = row["UNIID"].ToString();
document.Name = row["DocumentName"].ToString();
document.SEO_D = row["SEO_D"].ToString();
document.PDF = row["PDF"].ToString();
if (!string.IsNullOrEmpty(row["publishTime"].ToString().Trim()))
{
document.PublishTime = Convert.ToDateTime(row["publishTime"].ToString());
}
else
{
document.PublishTime = Convert.ToDateTime(row["addTime"].ToString());
}
SimpleHTMLFormatter simpleHTMLFormatter =
new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
Highlighter titleHighlighter;
Highlighter contentHighlighter;
titleHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
contentHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
titleHighlighter.FragmentSize = 100;
contentHighlighter.FragmentSize = 100;
string lightname = titleHighlighter.GetBestFragment(key, document.Name);
if (string.IsNullOrEmpty(lightname))
{
document.TitleHighLighter = document.Name;
}
else
{
document.TitleHighLighter = lightname;
}
result.Add(document);
}
sw.Stop();
return result;
}
/// <summary>
/// English site-wide search (WholeSearch_EN table)
/// </summary>
/// <param name="pagesize">Page size; 0 omits the paging clause</param>
/// <param name="pageindex">Page index (1-based)</param>
/// <param name="key">Search keyword</param>
/// <param name="isLevel">Comma-separated visit levels the current user is allowed to see</param>
/// <param name="typeid">Type ID filter; values greater than 0 restrict the search to one type</param>
/// <param name="Count">Total number of matching records</param>
/// <returns>One page of search results</returns>
public List<ESearch> EN_SearchAllList(int pagesize, int pageindex, string key, string isLevel, int typeid, out int Count)
{
string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
DataSet ds = new DataSet();
using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
{
conn.Open();
GetAnalyzerName(conn, "WholeSearch_EN");
string wordssplitbyspace;
HubbleCommand matchCmd = new HubbleCommand(conn);
string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("WholeSearch_EN",
"Name", key, int.MaxValue, out wordssplitbyspace);
HubbleDataAdapter adapter = new HubbleDataAdapter();
StringBuilder strSql = new StringBuilder();
string[] levelPara;
if (string.IsNullOrEmpty(isLevel))
{
if (pagesize == 0)
{
strSql.Append("select ID,Name,TypeID,SEO_D,TypeName,addTime from ");
}
else
{
strSql.Append("select between @begin to @end ID,Name,TypeID,SEO_D,TypeName,addTime from ");
}
strSql.Append(" WholeSearch_EN where ( Name Contains @matchString or Name Match @matchString or SEO_D Contains @matchString or SEO_D Match @matchString) and VisitLevel =''");
if (typeid > 0)
{
strSql.Append(" and typeid=@typeid");
}
strSql.Append(" order by score desc, lorder");
}
else
{
levelPara = isLevel.Split(',');
if (pagesize == 0)
{
strSql.Append("select ID,Name,TypeID,SEO_D,TypeName,addTime from ");
}
else
{
strSql.Append("select between @begin to @end ID,Name,TypeID,SEO_D,TypeName,addTime from ");
}
strSql.Append(" WholeSearch_EN where ( Name Contains @matchString or Name Match @matchString or SEO_D Contains @matchString or SEO_D Match @matchString) and (");
for (int i = 0; i < levelPara.Length; i++)
{
strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
}
strSql.Remove(strSql.Length - 3, 2);
strSql.Append(" or VisitLevel='') ");
if (typeid > 0)
{
strSql.Append(" and typeid=@typeid");
}
strSql.Append(" order by score desc, lorder");
}
adapter.SelectCommand = new HubbleCommand(strSql.ToString(), conn);
adapter.SelectCommand.Parameters.Add("@begin", (pageindex - 1) * pagesize);
adapter.SelectCommand.Parameters.Add("@end", pageindex * pagesize - 1);
adapter.SelectCommand.Parameters.Add("@matchString", matchString);
adapter.SelectCommand.Parameters.Add("@typeid", typeid);
if (!string.IsNullOrEmpty(isLevel))
{
for (int i = 0; i < isLevel.Split(',').Length; i++)
{
adapter.SelectCommand.Parameters.Add("@VisitLevel" + i, isLevel.Split(',')[i].Replace("'", ""));
}
}
adapter.SelectCommand.CacheTimeout = CacheTimeout;
ds = new System.Data.DataSet();
//adapter.Fill(ds);
HubbleCommand cmd = adapter.SelectCommand;
ds = cmd.Query(CacheTimeout);
//titleWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Title", docids, int.MaxValue);
//contentWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Content", docids, int.MaxValue);
}
Count = ds.Tables[0].MinimumCapacity;
List<ESearch> result = new List<ESearch>();
foreach (System.Data.DataRow row in ds.Tables[0].Rows)
{
ESearch search = new ESearch();
search.ID = int.Parse(row["ID"].ToString());
//search.UNIID = new Guid(row["UNIID"].ToString());
search.Name = Strings.NoHTML(row["Name"].ToString());
search.SEO_D = row["SEO_D"].ToString();
//search.content = Strings.NoHTML(row["content"].ToString());
search.TypeID = int.Parse(row["TypeID"].ToString());
search.TypeName = row["TypeName"].ToString();
//search.TXT = Strings.NoHTML(row["TXT"].ToString());
if (!string.IsNullOrEmpty(row["addTime"].ToString().Trim()))
{
search.addTime = Convert.ToDateTime(row["addTime"].ToString());
}
else
{
search.addTime = Convert.ToDateTime("9999-12-31");
}
SimpleHTMLFormatter simpleHTMLFormatter =
new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
Highlighter titleHighlighter;
Highlighter contentHighlighter;
titleHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
contentHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
titleHighlighter.FragmentSize = 100;
contentHighlighter.FragmentSize = 100;
search.Abstract = contentHighlighter.GetBestFragment(key, search.SEO_D);
if (string.IsNullOrEmpty(search.Abstract))
{
search.Abstract = search.SEO_D;
}
search.TitleHighLighter = titleHighlighter.GetBestFragment(key, search.Name);
if (string.IsNullOrEmpty(search.TitleHighLighter))
{
search.TitleHighLighter = search.Name;
}
result.Add(search);
}
return result;
}
/// <summary>
/// Chinese document search list (DocumentSearch table, title-only matching)
/// </summary>
/// <param name="pageindex">Page index (1-based)</param>
/// <param name="pagesize">Page size</param>
/// <param name="key">Search keyword (the part before '_' is used as the match string)</param>
/// <param name="isLevel">Comma-separated visit levels the current user is allowed to see</param>
/// <param name="Count">Total number of matching records</param>
/// <returns>One page of matching documents</returns>
public List<EArticle> SearchDocListNew(int pageindex, int pagesize, string key, string isLevel, out int Count)
{
string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
DataSet ds = new DataSet();
//string sql = "select * from News where Title match '" + key + "'";
//ds = SQLHelper.ExecuteDataset(strConn, CommandType.Text, sql, null);
System.Diagnostics.Stopwatch sw = new System.Diagnostics.Stopwatch();
sw.Start();
using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
{
conn.Open();
GetAnalyzerName(conn, "DocumentSearch");
string wordssplitbyspace;
HubbleCommand matchCmd = new HubbleCommand(conn);
string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("DocumentSearch",
"Documentname", (key.Split('_')[0]).ToString(), int.MaxValue, out wordssplitbyspace);
HubbleDataAdapter adapter = new HubbleDataAdapter();
StringBuilder strSql = new StringBuilder();
string[] levelPara;
//if (key.Split('_')[1] == "1")
//{
// if (string.IsNullOrEmpty(isLevel))
// {
// // strSql.Append("select between @begin to @end * from V_DocumentSearch where ( documentname^100 Contains @matchString or Content^50 Contains @matchString or documentname^100 match @matchString or Content^50 match @matchString ) and VisitLevel ='' and release='true' order by score desc");
// strSql.Append("select between @begin to @end ID,UNIID,DocumentName,SEO_D,PDF,htmlURL,gid_collection,publishTime,addTime from DocumentSearch where ( documentname Contains @matchString or documentname match @matchString ) and VisitLevel ='' and release='true' order by score desc");
// }
// else
// {
// levelPara = isLevel.Split(',');
// //strSql.Append("select between @begin to @end * from V_DocumentSearch where ( documentname^100 Contains @matchString or Content^50 Contains @matchString or documentname^100 match @matchString or Content^50 match @matchString ) and (");
// strSql.Append("select between @begin to @end ID,UNIID,DocumentName,SEO_D,PDF,htmlURL,gid_collection,publishTime,addTime from DocumentSearch where ( documentname Contains @matchString or documentname match @matchString ) and (");
// for (int i = 0; i < levelPara.Length; i++)
// {
// strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
// }
// strSql.Remove(strSql.Length - 3, 2);
// strSql.Append(" or VisitLevel='') and release='true' order by score desc");
// }
//}
//else if (key.Split('_')[1] == "2")
//{
// if (string.IsNullOrEmpty(isLevel))
// {
// //strSql.Append("select between @begin to @end * from V_DocumentSearch where ( documentname^100 Contains @matchString or Content^50 Contains @matchString or documentname^100 match @matchString or Content^50 match @matchString ) and VisitLevel ='' and release='true' order by publishTime desc");
// strSql.Append("select between @begin to @end ID,UNIID,DocumentName,SEO_D,PDF,htmlURL,gid_collection,publishTime,addTime from DocumentSearch where ( documentname Contains @matchString or documentname match @matchString ) and VisitLevel ='' and release='true' order by publishTime desc");
// }
// else
// {
// levelPara = isLevel.Split(',');
// //strSql.Append("select between @begin to @end * from V_DocumentSearch where ( documentname^100 Contains @matchString or Content^50 Contains @matchString or documentname^100 match @matchString or Content^50 match @matchString ) and (");
// strSql.Append("select between @begin to @end ID,UNIID,DocumentName,SEO_D,PDF,htmlURL,gid_collection,publishTime,addTime from DocumentSearch where ( documentname Contains @matchString or documentname match @matchString ) and (");
// for (int i = 0; i < levelPara.Length; i++)
// {
// strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
// }
// strSql.Remove(strSql.Length - 3, 2);
// strSql.Append(" or VisitLevel='') and release='true' order by publishTime desc");
// }
//}
//else
//{
if (string.IsNullOrEmpty(isLevel))
{
//strSql.Append("select between @begin to @end * from V_DocumentSearch where ( documentname^100 Contains @matchString or Content^50 Contains @matchString or documentname^100 match @matchString or Content^50 match @matchString ) and VisitLevel ='' and release='true' order by score desc, publishTime desc");
strSql.Append("select between @begin to @end ID,UNIID,DocumentName,SEO_D,PDF,htmlURL,gid_collection,publishTime,addTime from DocumentSearch where ( documentname Contains @matchString or documentname match @matchString ) and VisitLevel ='' and release='true' order by score desc, publishTime desc");
}
else
{
levelPara = isLevel.Split(',');
//strSql.Append("select between @begin to @end * from V_DocumentSearch where ( documentname^100 Contains @matchString or Content^50 Contains @matchString or documentname^100 match @matchString or Content^50 match @matchString ) and (");
strSql.Append("select between @begin to @end ID,UNIID,DocumentName,SEO_D,PDF,htmlURL,gid_collection,publishTime,addTime from DocumentSearch where ( documentname Contains @matchString or documentname match @matchString ) and (");
for (int i = 0; i < levelPara.Length; i++)
{
strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
}
strSql.Remove(strSql.Length - 3, 2);
strSql.Append(" or VisitLevel='') and release='true' order by score desc, publishTime desc ");
}
//}
adapter.SelectCommand = new HubbleCommand(strSql.ToString(), conn);
adapter.SelectCommand.Parameters.Add("@begin", (pagesize - 1) * pageindex);
adapter.SelectCommand.Parameters.Add("@end", pagesize * pageindex - 1);
adapter.SelectCommand.Parameters.Add("@matchString", matchString);
if (!string.IsNullOrEmpty(isLevel))
{
for (int i = 0; i < isLevel.Split(',').Length; i++)
{
adapter.SelectCommand.Parameters.Add("@VisitLevel" + i, isLevel.Split(',')[i].Replace("'", ""));
}
}
adapter.SelectCommand.CacheTimeout = CacheTimeout;
ds = new System.Data.DataSet();
//adapter.Fill(ds);
HubbleCommand cmd = adapter.SelectCommand;
ds = cmd.Query(CacheTimeout);
//titleWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Title", docids, int.MaxValue);
//contentWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Content", docids, int.MaxValue);
}
Count = ds.Tables[0].MinimumCapacity;
List<EArticle> result = new List<EArticle>();
foreach (System.Data.DataRow row in ds.Tables[0].Rows)
{
EArticle document = new EArticle();
document.ID = int.Parse(row["ID"].ToString());
document.UNIID = row["UNIID"].ToString();
document.Name = row["DocumentName"].ToString();
document.SEO_D = row["SEO_D"].ToString();
document.PDF = row["PDF"].ToString();
document.htmlURL = row["htmlURL"].ToString();
document.gid_collection = row["gid_collection"].ToString();
document.TitleHighLighter = row["DocumentName"].ToString();
if (!string.IsNullOrEmpty(row["publishTime"].ToString().Trim()))
{
document.PublishTime = Convert.ToDateTime(row["publishTime"].ToString());
}
else
{
document.PublishTime = Convert.ToDateTime(row["AddTime"].ToString());
}
SimpleHTMLFormatter simpleHTMLFormatter =
new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
Highlighter titleHighlighter;
Highlighter contentHighlighter;
titleHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
contentHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
titleHighlighter.FragmentSize = 100;
contentHighlighter.FragmentSize = 100;
string lightname = titleHighlighter.GetBestFragment((key.Split('_')[0]).ToString(), document.Name);
if (string.IsNullOrEmpty(lightname))
{
document.TitleHighLighter = document.Name;
}
else
{
document.TitleHighLighter = lightname;
}
result.Add(document);
}
sw.Stop();
return result;
}
}
}
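For reference, here is a minimal calling sketch. It assumes a web.config/app.config with a "Search" connection string pointing at the Hubble server, a parameterless DHubble constructor, and the EArticle entity from the listing above (the exact entity namespace may differ in your project); the "keyword_flag" key format mirrors the key.Split('_') handling in SearchKnowList.

using System;
using System.Collections.Generic;
using RJ.DAL.UI_A;      // DHubble (from the listing above)
using RJ.Entity.UI_A;   // EArticle (assumed namespace)

class SearchDemo
{
    static void Main()
    {
        int total;
        DHubble dal = new DHubble();
        // Page 1, 10 hits per page; "_0" is a placeholder sort flag kept for the key.Split('_') convention.
        List<EArticle> hits = dal.SearchKnowList(1, 10, "hubble_0", out total);
        Console.WriteLine("Total hits: {0}", total);
        foreach (EArticle a in hits)
        {
            // TitleHighLighter already carries the <font color="red"> highlighted title fragment.
            Console.WriteLine("{0}  {1:yyyy-MM-dd}", a.TitleHighLighter, a.PublishTime);
        }
    }
}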