HubbleDotNet 使用类
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Hubble.SQLClient;
using System.Configuration;
using RJ.Entity.UI_A;
using System.Data;
using Hubble.Core.Analysis.HighLight;
using Hubble.Analyzer;
using RJ.DBConnection;
using RJ.DALS.UI_A;
using RJ.Common;
using RJ.Entity.UI_A.AboutRJ;
namespace RJ.DAL.UI_A
{
public class DHubble : UI_A_DALBaseClass
{
// Cached analyzer name of the "Title" column, filled lazily by GetAnalyzerName.
private static string _TitleAnalyzerName = null;
// Cached analyzer name of the "Content" column, filled lazily by GetAnalyzerName.
private static string _ContentAnalyzerName = null;
// Query cache timeout passed to Hubble, in seconds.
private const int CacheTimeout = 0; //In seconds
/// <summary>
/// Lazily caches the analyzer names of the "Title" and "Content" columns of the
/// given Hubble table into the static fields. A no-op once both are cached.
/// </summary>
/// <param name="conn">Open Hubble connection to query.</param>
/// <param name="tableName">Hubble table whose column metadata is inspected.</param>
public static void GetAnalyzerName(HubbleAsyncConnection conn, string tableName)
{
    // Already cached — nothing to do. NOTE(review): lazy init is not guarded by a
    // lock; concurrent first calls both run the query but write identical values,
    // so the race looks benign — confirm if this ever runs under heavy concurrency.
    if (_TitleAnalyzerName != null && _ContentAnalyzerName != null)
    {
        return;
    }
    // The table name is embedded in the SP call text, so escape single quotes.
    string sql = string.Format("exec SP_Columns '{0}'", tableName.Replace("'", "''"));
    HubbleCommand cmd = new HubbleCommand(sql, conn);
    foreach (System.Data.DataRow row in cmd.Query().Tables[0].Rows)
    {
        string fieldName = row["FieldName"].ToString();
        if (fieldName.Equals("Title", StringComparison.CurrentCultureIgnoreCase))
        {
            _TitleAnalyzerName = row["Analyzer"].ToString();
        }
        else if (fieldName.Equals("Content", StringComparison.CurrentCultureIgnoreCase))
        {
            // else-if: a column cannot be both Title and Content.
            _ContentAnalyzerName = row["Analyzer"].ToString();
        }
    }
}
/// <summary>
/// Knowledge search: paged full-text search over the KnowSearch table,
/// ordered by relevance score then publish time descending.
/// </summary>
/// <param name="pageindex">Page number (see NOTE below on the paging formula).</param>
/// <param name="pagesize">Records per page (see NOTE below on the paging formula).</param>
/// <param name="key">Search key; only the text before the first '_' is the keyword.</param>
/// <param name="Count">Total matching record count reported by Hubble.</param>
/// <returns>One page of matching articles with the title highlighted.</returns>
public List<EArticle> SearchKnowList(int pageindex, int pagesize, string key, out int Count)
{
    string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
    // Only the part of the key before the first '_' is the actual search keyword.
    string keyword = key.Split('_')[0];
    DataSet ds;
    using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
    {
        conn.Open();
        GetAnalyzerName(conn, "KnowSearch");
        string wordssplitbyspace;
        HubbleCommand matchCmd = new HubbleCommand(conn);
        string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("KnowSearch",
            "documentname", keyword, int.MaxValue, out wordssplitbyspace);
        HubbleDataAdapter adapter = new HubbleDataAdapter();
        adapter.SelectCommand = new HubbleCommand("select between @begin to @end ID,UNIID,DocumentName,publishTime from KnowSearch where ( documentname Contains @matchString or documentname match @matchString ) order by score desc,publishTime desc",
            conn);
        // NOTE(review): this paging formula multiplies (pagesize - 1) by pageindex,
        // unlike SearchAllList which uses (pageindex - 1) * pagesize. It only yields
        // a full page if callers pass the arguments swapped. Preserved as-is because
        // existing callers may rely on it — confirm against call sites before changing.
        adapter.SelectCommand.Parameters.Add("@begin", (pagesize - 1) * pageindex);
        adapter.SelectCommand.Parameters.Add("@end", pagesize * pageindex - 1);
        adapter.SelectCommand.Parameters.Add("@matchString", matchString);
        adapter.SelectCommand.CacheTimeout = CacheTimeout;
        ds = adapter.SelectCommand.Query(CacheTimeout);
    }
    // Hubble reports the total record count through DataTable.MinimumCapacity.
    Count = ds.Tables[0].MinimumCapacity;
    List<EArticle> result = new List<EArticle>();
    // Build the highlighter once instead of once per row; its configuration is
    // identical for every row.
    SimpleHTMLFormatter simpleHTMLFormatter =
        new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
    Highlighter titleHighlighter = new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
    titleHighlighter.FragmentSize = 100;
    foreach (System.Data.DataRow row in ds.Tables[0].Rows)
    {
        EArticle document = new EArticle();
        document.ID = int.Parse(row["ID"].ToString());
        document.UNIID = row["UNIID"].ToString();
        document.Name = row["DocumentName"].ToString();
        if (!string.IsNullOrEmpty(row["publishTime"].ToString().Trim()))
        {
            document.PublishTime = Convert.ToDateTime(row["publishTime"].ToString());
        }
        else
        {
            // No publish time recorded: use a far-future sentinel date.
            document.PublishTime = Convert.ToDateTime("9999-12-31");
        }
        // Fall back to the plain name when no highlightable fragment is found.
        string lightname = titleHighlighter.GetBestFragment(keyword, document.Name);
        document.TitleHighLighter = string.IsNullOrEmpty(lightname) ? document.Name : lightname;
        result.Add(document);
    }
    return result;
}
/// <summary>
/// Document search: paged full-text search over the DocumentSearch table,
/// matching the keyword against document name and content, filtered by the
/// caller's visit levels, ordered by relevance score.
/// </summary>
/// <param name="pageindex">Page number (see NOTE below on the paging formula).</param>
/// <param name="pagesize">Records per page (see NOTE below on the paging formula).</param>
/// <param name="key">Search keyword.</param>
/// <param name="isLevel">Comma-separated visit levels the caller may see; empty/null means public documents only.</param>
/// <param name="Count">Total matching record count reported by Hubble.</param>
/// <returns>One page of matching documents with the title highlighted.</returns>
public List<EArticle> SearchDocList(int pageindex, int pagesize, string key, string isLevel, out int Count)
{
    string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
    DataSet ds;
    using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
    {
        conn.Open();
        GetAnalyzerName(conn, "DocumentSearch");
        string wordssplitbyspace;
        HubbleCommand matchCmd = new HubbleCommand(conn);
        string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("DocumentSearch",
            "documentname", key, int.MaxValue, out wordssplitbyspace);
        HubbleDataAdapter adapter = new HubbleDataAdapter();
        StringBuilder strSql = new StringBuilder();
        if (string.IsNullOrEmpty(isLevel))
        {
            // No levels supplied: only publicly visible (VisitLevel='') released documents.
            strSql.Append("select between @begin to @end id,UNIID,documentname,htmlurl,publishtime,addtime,SEO_D,PDF from DocumentSearch where ( documentname Contains @matchString or Content Contains @matchString or documentname match @matchString or Content match @matchString ) and (");
            strSql.Clear();
            strSql.Append("select between @begin to @end id,UNIID,documentname,htmlurl,publishtime,addtime,SEO_D,PDF from DocumentSearch where ( documentname Contains @matchString or Content Contains @matchString or documentname match @matchString or Content match @matchString ) and VisitLevel ='' and release='true' order by score desc");
        }
        else
        {
            string[] levelPara = isLevel.Split(',');
            strSql.Append("select between @begin to @end id,UNIID,documentname,htmlurl,publishtime,addtime,SEO_D,PDF from DocumentSearch where ( documentname Contains @matchString or Content Contains @matchString or documentname match @matchString or Content match @matchString ) and (");
            for (int i = 0; i < levelPara.Length; i++)
            {
                strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
            }
            // Strip the "or" of the trailing " or " (its surrounding spaces stay).
            strSql.Remove(strSql.Length - 3, 2);
            strSql.Append(" or VisitLevel='') and release='true' order by score desc");
        }
        adapter.SelectCommand = new HubbleCommand(strSql.ToString(), conn);
        // NOTE(review): this paging formula multiplies (pagesize - 1) by pageindex,
        // unlike SearchAllList which uses (pageindex - 1) * pagesize. Preserved
        // as-is — confirm against call sites before changing.
        adapter.SelectCommand.Parameters.Add("@begin", (pagesize - 1) * pageindex);
        adapter.SelectCommand.Parameters.Add("@end", pagesize * pageindex - 1);
        adapter.SelectCommand.Parameters.Add("@matchString", matchString);
        if (!string.IsNullOrEmpty(isLevel))
        {
            string[] levels = isLevel.Split(',');
            for (int i = 0; i < levels.Length; i++)
            {
                // Single quotes stripped defensively even though the value is parameterized.
                adapter.SelectCommand.Parameters.Add("@VisitLevel" + i, levels[i].Replace("'", ""));
            }
        }
        adapter.SelectCommand.CacheTimeout = CacheTimeout;
        ds = adapter.SelectCommand.Query(CacheTimeout);
    }
    // Hubble reports the total record count through DataTable.MinimumCapacity.
    Count = ds.Tables[0].MinimumCapacity;
    List<EArticle> result = new List<EArticle>();
    // Build the highlighter once instead of once per row.
    SimpleHTMLFormatter simpleHTMLFormatter =
        new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
    Highlighter titleHighlighter = new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
    titleHighlighter.FragmentSize = 100;
    foreach (System.Data.DataRow row in ds.Tables[0].Rows)
    {
        EArticle document = new EArticle();
        document.ID = int.Parse(row["ID"].ToString());
        document.UNIID = row["UNIID"].ToString();
        document.Name = row["DocumentName"].ToString();
        document.htmlURL = row["htmlURL"].ToString();
        document.SEO_D = row["SEO_D"].ToString();
        document.PDF = row["PDF"].ToString();
        if (!string.IsNullOrEmpty(row["publishTime"].ToString().Trim()))
        {
            document.PublishTime = Convert.ToDateTime(row["publishTime"].ToString());
        }
        else
        {
            // Fall back to the record's add time when no publish time is set.
            document.PublishTime = Convert.ToDateTime(row["AddTime"].ToString());
        }
        string lightname = titleHighlighter.GetBestFragment(key, document.Name);
        document.TitleHighLighter = string.IsNullOrEmpty(lightname) ? document.Name : lightname;
        result.Add(document);
    }
    return result;
}
/// <summary>
/// Software search: paged full-text search over the SoftwareSearch table,
/// filtered by the caller's visit levels (the 'JingBiaoLevel' level is always
/// visible), ordered by relevance score.
/// </summary>
/// <param name="pageindex">Page number (see NOTE below on the paging formula).</param>
/// <param name="pagesize">Records per page (see NOTE below on the paging formula).</param>
/// <param name="key">Search keyword.</param>
/// <param name="isLevel">Comma-separated visit levels the caller may see; empty/null means public entries only.</param>
/// <param name="Count">Total matching record count reported by Hubble.</param>
/// <returns>One page of matching software entries with the title highlighted.</returns>
public List<EArticle> SearchSoftWareList(int pageindex, int pagesize, string key, string isLevel, out int Count)
{
    string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
    DataSet ds;
    using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
    {
        conn.Open();
        GetAnalyzerName(conn, "SoftwareSearch");
        string wordssplitbyspace;
        HubbleCommand matchCmd = new HubbleCommand(conn);
        string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("SoftwareSearch",
            "DocumentName", key, int.MaxValue, out wordssplitbyspace);
        HubbleDataAdapter adapter = new HubbleDataAdapter();
        StringBuilder strSql = new StringBuilder();
        if (string.IsNullOrEmpty(isLevel))
        {
            strSql.Append("select between @begin to @end id,documentname,publishtime,addtime,UNIID,IsCheckPartner from SoftwareSearch where (DocumentName Contains @matchString or DocumentName match @matchString) and (VisitLevel ='' or visitlevel='JingBiaoLevel') and release='true' order by score desc");
        }
        else
        {
            string[] levelPara = isLevel.Split(',');
            strSql.Append("select between @begin to @end id,documentname,publishtime,addtime,UNIID,IsCheckPartner from SoftwareSearch where (DocumentName Contains @matchString or DocumentName match @matchString) and (");
            for (int i = 0; i < levelPara.Length; i++)
            {
                strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
            }
            // Strip the "or" of the trailing " or " (its surrounding spaces stay).
            strSql.Remove(strSql.Length - 3, 2);
            strSql.Append(" or VisitLevel='' or visitlevel='JingBiaoLevel') and release='true' order by score desc");
        }
        adapter.SelectCommand = new HubbleCommand(strSql.ToString(), conn);
        // NOTE(review): this paging formula multiplies (pagesize - 1) by pageindex,
        // unlike SearchAllList which uses (pageindex - 1) * pagesize. Preserved
        // as-is — confirm against call sites before changing.
        adapter.SelectCommand.Parameters.Add("@begin", (pagesize - 1) * pageindex);
        adapter.SelectCommand.Parameters.Add("@end", pagesize * pageindex - 1);
        adapter.SelectCommand.Parameters.Add("@matchString", matchString);
        if (!string.IsNullOrEmpty(isLevel))
        {
            string[] levels = isLevel.Split(',');
            for (int i = 0; i < levels.Length; i++)
            {
                // Single quotes stripped defensively even though the value is parameterized.
                adapter.SelectCommand.Parameters.Add("@VisitLevel" + i, levels[i].Replace("'", ""));
            }
        }
        adapter.SelectCommand.CacheTimeout = CacheTimeout;
        ds = adapter.SelectCommand.Query(CacheTimeout);
    }
    // Hubble reports the total record count through DataTable.MinimumCapacity.
    Count = ds.Tables[0].MinimumCapacity;
    List<EArticle> result = new List<EArticle>();
    // Build the highlighter once instead of once per row.
    SimpleHTMLFormatter simpleHTMLFormatter =
        new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
    Highlighter titleHighlighter = new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
    titleHighlighter.FragmentSize = 100;
    foreach (System.Data.DataRow row in ds.Tables[0].Rows)
    {
        EArticle software = new EArticle();
        software.ID = int.Parse(row["ID"].ToString());
        software.Name = row["DocumentName"].ToString();
        software.UpdateTime = Convert.ToDateTime(row["AddTime"].ToString());
        software.UNIID = row["UNIID"].ToString();
        software.IsCheckPartner = Convert.ToBoolean(row["IsCheckPartner"]);
        if (!string.IsNullOrEmpty(row["publishTime"].ToString().Trim()))
        {
            software.PublishTime = Convert.ToDateTime(row["publishTime"].ToString());
        }
        else
        {
            // Fall back to the record's add time when no publish time is set.
            software.PublishTime = Convert.ToDateTime(row["Addtime"].ToString());
        }
        software.TitleHighLighter = titleHighlighter.GetBestFragment(key, software.Name);
        if (string.IsNullOrEmpty(software.TitleHighLighter))
        {
            software.TitleHighLighter = software.Name;
        }
        result.Add(software);
    }
    return result;
}
/// <summary>
/// Site-wide search (Chinese): full-text search over the WholeSearch_CN table,
/// optionally paged (pagesize != 0) and optionally restricted to one type
/// (typeid &gt; 0). When not restricted to a type, a GroupBy clause additionally
/// returns per-TypeID counts in a second result table.
/// </summary>
/// <param name="pagesize">Records per page; 0 returns all matches unpaged.</param>
/// <param name="pageindex">1-based page number.</param>
/// <param name="key">Search keyword.</param>
/// <param name="isLevel">Comma-separated visit levels the caller may see; empty/null means public and 'JingBiaoLevel' entries only.</param>
/// <param name="typeid">Restrict results to this type when &gt; 0.</param>
/// <param name="Count">Total matching record count reported by Hubble.</param>
/// <param name="dtcount">Per-type count table from the GroupBy clause, or null when absent.</param>
/// <returns>Matching entries with title and abstract highlighted.</returns>
public List<ESearch> SearchAllList(int pagesize, int pageindex, string key, string isLevel, int typeid, out int Count, out DataTable dtcount)
{
    string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
    DataSet ds;
    using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
    {
        conn.Open();
        GetAnalyzerName(conn, "WholeSearch_CN");
        string wordssplitbyspace;
        HubbleCommand matchCmd = new HubbleCommand(conn);
        string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("WholeSearch_CN",
            "Name", key, int.MaxValue, out wordssplitbyspace);
        HubbleDataAdapter adapter = new HubbleDataAdapter();
        StringBuilder strSql = new StringBuilder();
        strSql.Append(" select ");
        if (pagesize != 0)
        {
            strSql.Append(" between @begin to @end ID,Name,TypeID,SEO_D,TypeName,strURL,addTime from ");
        }
        else
        {
            // BUGFIX: previously the unpaged branch appended nothing, producing
            // invalid SQL ("select  WholeSearch_CN where ..."). Mirrors
            // EN_SearchAllList, which selects the column list without paging.
            strSql.Append(" ID,Name,TypeID,SEO_D,TypeName,strURL,addTime from ");
        }
        strSql.Append(" WholeSearch_CN where ( Name Contains @matchString or Name Match @matchString or SEO_D Contains @matchString or SEO_D Match @matchString) ");
        if (string.IsNullOrEmpty(isLevel))
        {
            strSql.Append(" and (VisitLevel ='' or visitlevel='JingBiaoLevel')");
        }
        else
        {
            string[] levelPara = isLevel.Split(',');
            strSql.Append(" and (");
            for (int i = 0; i < levelPara.Length; i++)
            {
                strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
            }
            // Strip the "or" of the trailing " or " (its surrounding spaces stay).
            strSql.Remove(strSql.Length - 3, 2);
            strSql.Append(" or VisitLevel='' or visitlevel='JingBiaoLevel') ");
        }
        if (typeid > 0)
        {
            strSql.Append(" and typeid=@typeid");
        }
        else
        {
            // No type filter: ask Hubble to also return the top-10 per-TypeID counts.
            strSql.Insert(0, " [GroupBy('Count', 'ID', 'TypeID', 10)] ");
        }
        strSql.Append(" order by score desc, lorder");
        adapter.SelectCommand = new HubbleCommand(strSql.ToString(), conn);
        adapter.SelectCommand.Parameters.Add("@begin", (pageindex - 1) * pagesize);
        adapter.SelectCommand.Parameters.Add("@end", pageindex * pagesize - 1);
        adapter.SelectCommand.Parameters.Add("@matchString", matchString);
        adapter.SelectCommand.Parameters.Add("@typeid", typeid);
        if (!string.IsNullOrEmpty(isLevel))
        {
            string[] levels = isLevel.Split(',');
            for (int i = 0; i < levels.Length; i++)
            {
                // Single quotes stripped defensively even though the value is parameterized.
                adapter.SelectCommand.Parameters.Add("@VisitLevel" + i, levels[i].Replace("'", ""));
            }
        }
        adapter.SelectCommand.CacheTimeout = CacheTimeout;
        ds = adapter.SelectCommand.Query(CacheTimeout);
    }
    // Hubble reports the total record count through DataTable.MinimumCapacity.
    Count = ds.Tables[0].MinimumCapacity;
    // The GroupBy clause, when present, yields a second table with per-type counts.
    dtcount = ds.Tables.Count > 1 ? ds.Tables[1] : null;
    List<ESearch> result = new List<ESearch>();
    // Build the highlighters once instead of once per row.
    SimpleHTMLFormatter simpleHTMLFormatter =
        new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
    Highlighter titleHighlighter = new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
    Highlighter contentHighlighter = new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
    titleHighlighter.FragmentSize = 100;
    contentHighlighter.FragmentSize = 100;
    foreach (System.Data.DataRow row in ds.Tables[0].Rows)
    {
        ESearch search = new ESearch();
        search.ID = int.Parse(row["ID"].ToString());
        search.Name = Strings.NoHTML(row["Name"].ToString());
        search.SEO_D = row["SEO_D"].ToString();
        search.strURL = row["strURL"].ToString();
        // TypeID may be empty in grouped/aggregate rows; only parse when present.
        if (row["TypeID"].ToString() != "")
        {
            search.TypeID = int.Parse(row["TypeID"].ToString());
            search.TypeName = row["TypeName"].ToString();
        }
        if (!string.IsNullOrEmpty(row["addTime"].ToString().Trim()))
        {
            search.addTime = Convert.ToDateTime(row["addTime"].ToString());
        }
        else
        {
            // No add time recorded: use a far-future sentinel date.
            search.addTime = Convert.ToDateTime("9999-12-31");
        }
        search.Abstract = contentHighlighter.GetBestFragment(key, search.SEO_D);
        if (string.IsNullOrEmpty(search.Abstract))
        {
            search.Abstract = search.SEO_D;
        }
        search.TitleHighLighter = titleHighlighter.GetBestFragment(key, search.Name);
        if (string.IsNullOrEmpty(search.TitleHighLighter))
        {
            search.TitleHighLighter = search.Name;
        }
        result.Add(search);
    }
    return result;
}
/// <summary>
/// English document search: paged full-text search over the DocumentSearch_EN
/// table, filtered by the caller's visit levels, ordered by relevance score.
/// </summary>
/// <param name="pageindex">Page number (see NOTE below on the paging formula).</param>
/// <param name="pagesize">Records per page (see NOTE below on the paging formula).</param>
/// <param name="key">Search keyword.</param>
/// <param name="isLevel">Comma-separated visit levels the caller may see; empty/null means public documents only.</param>
/// <param name="Count">Total matching record count reported by Hubble.</param>
/// <returns>One page of matching documents with the title highlighted.</returns>
public List<EArticle> EN_SearchDocList(int pageindex, int pagesize, string key, string isLevel, out int Count)
{
    string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
    DataSet ds;
    using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
    {
        conn.Open();
        GetAnalyzerName(conn, "DocumentSearch_EN");
        string wordssplitbyspace;
        HubbleCommand matchCmd = new HubbleCommand(conn);
        string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("DocumentSearch_EN",
            "documentname", key, int.MaxValue, out wordssplitbyspace);
        HubbleDataAdapter adapter = new HubbleDataAdapter();
        StringBuilder strSql = new StringBuilder();
        if (string.IsNullOrEmpty(isLevel))
        {
            strSql.Append("select between @begin to @end ID,UNIID,DocumentName,PDF,SEO_D,publishTime,addTime from DocumentSearch_EN where ( documentname Contains @matchString or documentname match @matchString ) and VisitLevel ='' and release='true' order by score desc");
        }
        else
        {
            // NOTE(review): only this branch also matches against SEO_K; the
            // public-only branch above does not. Looks inconsistent — confirm
            // whether that asymmetry is intentional.
            string[] levelPara = isLevel.Split(',');
            strSql.Append("select between @begin to @end ID,UNIID,DocumentName,PDF,SEO_D,publishTime,addTime from DocumentSearch_EN where ( documentname Contains @matchString or documentname match @matchString or SEO_K Contains @matchString or SEO_K match @matchString ) and (");
            for (int i = 0; i < levelPara.Length; i++)
            {
                strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
            }
            // Strip the "or" of the trailing " or " (its surrounding spaces stay).
            strSql.Remove(strSql.Length - 3, 2);
            strSql.Append(" or VisitLevel='') and release='true' order by score desc");
        }
        adapter.SelectCommand = new HubbleCommand(strSql.ToString(), conn);
        // NOTE(review): this paging formula multiplies (pagesize - 1) by pageindex,
        // unlike SearchAllList which uses (pageindex - 1) * pagesize. Preserved
        // as-is — confirm against call sites before changing.
        adapter.SelectCommand.Parameters.Add("@begin", (pagesize - 1) * pageindex);
        adapter.SelectCommand.Parameters.Add("@end", pagesize * pageindex - 1);
        adapter.SelectCommand.Parameters.Add("@matchString", matchString);
        if (!string.IsNullOrEmpty(isLevel))
        {
            string[] levels = isLevel.Split(',');
            for (int i = 0; i < levels.Length; i++)
            {
                // Single quotes stripped defensively even though the value is parameterized.
                adapter.SelectCommand.Parameters.Add("@VisitLevel" + i, levels[i].Replace("'", ""));
            }
        }
        adapter.SelectCommand.CacheTimeout = CacheTimeout;
        ds = adapter.SelectCommand.Query(CacheTimeout);
    }
    // Hubble reports the total record count through DataTable.MinimumCapacity.
    Count = ds.Tables[0].MinimumCapacity;
    List<EArticle> result = new List<EArticle>();
    // Build the highlighter once instead of once per row.
    SimpleHTMLFormatter simpleHTMLFormatter =
        new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
    Highlighter titleHighlighter = new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
    titleHighlighter.FragmentSize = 100;
    foreach (System.Data.DataRow row in ds.Tables[0].Rows)
    {
        EArticle document = new EArticle();
        document.ID = int.Parse(row["ID"].ToString());
        document.UNIID = row["UNIID"].ToString();
        document.Name = row["DocumentName"].ToString();
        document.SEO_D = row["SEO_D"].ToString();
        document.PDF = row["PDF"].ToString();
        if (!string.IsNullOrEmpty(row["publishTime"].ToString().Trim()))
        {
            document.PublishTime = Convert.ToDateTime(row["publishTime"].ToString());
        }
        else
        {
            // Fall back to the record's add time when no publish time is set.
            document.PublishTime = Convert.ToDateTime(row["addTime"].ToString());
        }
        string lightname = titleHighlighter.GetBestFragment(key, document.Name);
        document.TitleHighLighter = string.IsNullOrEmpty(lightname) ? document.Name : lightname;
        result.Add(document);
    }
    return result;
}
/// <summary>
/// Site-wide search (English): full-text search over the WholeSearch_EN table,
/// optionally paged (pagesize != 0) and optionally restricted to one type
/// (typeid &gt; 0), filtered by the caller's visit levels.
/// </summary>
/// <param name="pagesize">Records per page; 0 returns all matches unpaged.</param>
/// <param name="pageindex">1-based page number.</param>
/// <param name="key">Search keyword.</param>
/// <param name="isLevel">Comma-separated visit levels the caller may see; empty/null means public entries only.</param>
/// <param name="typeid">Restrict results to this type when &gt; 0.</param>
/// <param name="Count">Total matching record count reported by Hubble.</param>
/// <returns>Matching entries with title and abstract highlighted.</returns>
public List<ESearch> EN_SearchAllList(int pagesize, int pageindex, string key, string isLevel, int typeid, out int Count)
{
    string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
    DataSet ds;
    using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
    {
        conn.Open();
        GetAnalyzerName(conn, "WholeSearch_EN");
        string wordssplitbyspace;
        HubbleCommand matchCmd = new HubbleCommand(conn);
        string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("WholeSearch_EN",
            "Name", key, int.MaxValue, out wordssplitbyspace);
        HubbleDataAdapter adapter = new HubbleDataAdapter();
        StringBuilder strSql = new StringBuilder();
        // Shared select prefix: paged only when a page size was supplied.
        if (pagesize == 0)
        {
            strSql.Append("select ID,Name,TypeID,SEO_D,TypeName,addTime from ");
        }
        else
        {
            strSql.Append("select between @begin to @end ID,Name,TypeID,SEO_D,TypeName,addTime from ");
        }
        if (string.IsNullOrEmpty(isLevel))
        {
            strSql.Append(" WholeSearch_EN where ( Name Contains @matchString or Name Match @matchString or SEO_D Contains @matchString or SEO_D Match @matchString) and VisitLevel =''");
        }
        else
        {
            string[] levelPara = isLevel.Split(',');
            strSql.Append(" WholeSearch_EN where ( Name Contains @matchString or Name Match @matchString or SEO_D Contains @matchString or SEO_D Match @matchString) and (");
            for (int i = 0; i < levelPara.Length; i++)
            {
                strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
            }
            // Strip the "or" of the trailing " or " (its surrounding spaces stay).
            strSql.Remove(strSql.Length - 3, 2);
            strSql.Append(" or VisitLevel='') ");
        }
        if (typeid > 0)
        {
            strSql.Append(" and typeid=@typeid");
        }
        strSql.Append(" order by score desc, lorder");
        adapter.SelectCommand = new HubbleCommand(strSql.ToString(), conn);
        adapter.SelectCommand.Parameters.Add("@begin", (pageindex - 1) * pagesize);
        adapter.SelectCommand.Parameters.Add("@end", pageindex * pagesize - 1);
        adapter.SelectCommand.Parameters.Add("@matchString", matchString);
        adapter.SelectCommand.Parameters.Add("@typeid", typeid);
        if (!string.IsNullOrEmpty(isLevel))
        {
            string[] levels = isLevel.Split(',');
            for (int i = 0; i < levels.Length; i++)
            {
                // Single quotes stripped defensively even though the value is parameterized.
                adapter.SelectCommand.Parameters.Add("@VisitLevel" + i, levels[i].Replace("'", ""));
            }
        }
        adapter.SelectCommand.CacheTimeout = CacheTimeout;
        ds = adapter.SelectCommand.Query(CacheTimeout);
    }
    // Hubble reports the total record count through DataTable.MinimumCapacity.
    Count = ds.Tables[0].MinimumCapacity;
    List<ESearch> result = new List<ESearch>();
    // Build the highlighters once instead of once per row.
    SimpleHTMLFormatter simpleHTMLFormatter =
        new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
    Highlighter titleHighlighter = new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
    Highlighter contentHighlighter = new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
    titleHighlighter.FragmentSize = 100;
    contentHighlighter.FragmentSize = 100;
    foreach (System.Data.DataRow row in ds.Tables[0].Rows)
    {
        ESearch search = new ESearch();
        search.ID = int.Parse(row["ID"].ToString());
        search.Name = Strings.NoHTML(row["Name"].ToString());
        search.SEO_D = row["SEO_D"].ToString();
        // Guard against an empty TypeID (mirrors SearchAllList); a bare
        // int.Parse("") would throw FormatException and abort the whole page.
        if (row["TypeID"].ToString() != "")
        {
            search.TypeID = int.Parse(row["TypeID"].ToString());
            search.TypeName = row["TypeName"].ToString();
        }
        if (!string.IsNullOrEmpty(row["addTime"].ToString().Trim()))
        {
            search.addTime = Convert.ToDateTime(row["addTime"].ToString());
        }
        else
        {
            // No add time recorded: use a far-future sentinel date.
            search.addTime = Convert.ToDateTime("9999-12-31");
        }
        search.Abstract = contentHighlighter.GetBestFragment(key, search.SEO_D);
        if (string.IsNullOrEmpty(search.Abstract))
        {
            search.Abstract = search.SEO_D;
        }
        search.TitleHighLighter = titleHighlighter.GetBestFragment(key, search.Name);
        if (string.IsNullOrEmpty(search.TitleHighLighter))
        {
            search.TitleHighLighter = search.Name;
        }
        result.Add(search);
    }
    return result;
}
/// <summary>
/// Chinese document search (new variant): paged full-text search over the
/// DocumentSearch table, filtered by the caller's visit levels, ordered by
/// relevance score then publish time descending.
/// </summary>
/// <param name="pageindex">Page number (see NOTE below on the paging formula).</param>
/// <param name="pagesize">Records per page (see NOTE below on the paging formula).</param>
/// <param name="key">Search key; only the text before the first '_' is the keyword.</param>
/// <param name="isLevel">Comma-separated visit levels the caller may see; empty/null means public documents only.</param>
/// <param name="Count">Total matching record count reported by Hubble.</param>
/// <returns>One page of matching documents with the title highlighted.</returns>
public List<EArticle> SearchDocListNew(int pageindex, int pagesize, string key, string isLevel, out int Count)
{
    string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
    // Only the part of the key before the first '_' is the actual search keyword.
    string keyword = key.Split('_')[0];
    DataSet ds;
    using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
    {
        conn.Open();
        GetAnalyzerName(conn, "DocumentSearch");
        string wordssplitbyspace;
        HubbleCommand matchCmd = new HubbleCommand(conn);
        string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("DocumentSearch",
            "Documentname", keyword, int.MaxValue, out wordssplitbyspace);
        HubbleDataAdapter adapter = new HubbleDataAdapter();
        StringBuilder strSql = new StringBuilder();
        if (string.IsNullOrEmpty(isLevel))
        {
            strSql.Append("select between @begin to @end ID,UNIID,DocumentName,SEO_D,PDF,htmlURL,gid_collection,publishTime,addTime from DocumentSearch where ( documentname Contains @matchString or documentname match @matchString ) and VisitLevel ='' and release='true' order by score desc, publishTime desc");
        }
        else
        {
            string[] levelPara = isLevel.Split(',');
            strSql.Append("select between @begin to @end ID,UNIID,DocumentName,SEO_D,PDF,htmlURL,gid_collection,publishTime,addTime from DocumentSearch where ( documentname Contains @matchString or documentname match @matchString ) and (");
            for (int i = 0; i < levelPara.Length; i++)
            {
                strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
            }
            // Strip the "or" of the trailing " or " (its surrounding spaces stay).
            strSql.Remove(strSql.Length - 3, 2);
            strSql.Append(" or VisitLevel='') and release='true' order by score desc, publishTime desc ");
        }
        adapter.SelectCommand = new HubbleCommand(strSql.ToString(), conn);
        // NOTE(review): this paging formula multiplies (pagesize - 1) by pageindex,
        // unlike SearchAllList which uses (pageindex - 1) * pagesize. Preserved
        // as-is — confirm against call sites before changing.
        adapter.SelectCommand.Parameters.Add("@begin", (pagesize - 1) * pageindex);
        adapter.SelectCommand.Parameters.Add("@end", pagesize * pageindex - 1);
        adapter.SelectCommand.Parameters.Add("@matchString", matchString);
        if (!string.IsNullOrEmpty(isLevel))
        {
            string[] levels = isLevel.Split(',');
            for (int i = 0; i < levels.Length; i++)
            {
                // Single quotes stripped defensively even though the value is parameterized.
                adapter.SelectCommand.Parameters.Add("@VisitLevel" + i, levels[i].Replace("'", ""));
            }
        }
        adapter.SelectCommand.CacheTimeout = CacheTimeout;
        ds = adapter.SelectCommand.Query(CacheTimeout);
    }
    // Hubble reports the total record count through DataTable.MinimumCapacity.
    Count = ds.Tables[0].MinimumCapacity;
    List<EArticle> result = new List<EArticle>();
    // Build the highlighter once instead of once per row.
    SimpleHTMLFormatter simpleHTMLFormatter =
        new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
    Highlighter titleHighlighter = new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
    titleHighlighter.FragmentSize = 100;
    foreach (System.Data.DataRow row in ds.Tables[0].Rows)
    {
        EArticle document = new EArticle();
        document.ID = int.Parse(row["ID"].ToString());
        document.UNIID = row["UNIID"].ToString();
        document.Name = row["DocumentName"].ToString();
        document.SEO_D = row["SEO_D"].ToString();
        document.PDF = row["PDF"].ToString();
        document.htmlURL = row["htmlURL"].ToString();
        document.gid_collection = row["gid_collection"].ToString();
        if (!string.IsNullOrEmpty(row["publishTime"].ToString().Trim()))
        {
            document.PublishTime = Convert.ToDateTime(row["publishTime"].ToString());
        }
        else
        {
            // Fall back to the record's add time when no publish time is set.
            document.PublishTime = Convert.ToDateTime(row["AddTime"].ToString());
        }
        // TitleHighLighter is set exactly once here; the old code pre-assigned the
        // raw name and then always overwrote it.
        string lightname = titleHighlighter.GetBestFragment(keyword, document.Name);
        document.TitleHighLighter = string.IsNullOrEmpty(lightname) ? document.Name : lightname;
        result.Add(document);
    }
    return result;
}
}
}
HubbleDotNet 使用类的更多相关文章
- Java类的继承与多态特性-入门笔记
相信对于继承和多态的概念我就不再怎么解释啦!不管你是.Net还是Java,面向对象编程都是必不可少的一堂课~~.Net如此,Java亦有同样的思想成分包含其中. 继承,多态,封装是Java面向对象的3大特 ...
- HubbleDotNet 开源全文搜索数据库项目--为数据库现有表或视图建立全文索引(三) 多表关联全文索引模式
关系型数据库中,多表关联是很常见的事情,HubbleDotNet 可以对部分情况的多表关联形式建立关联的全文索引,这样用户就不需要专门建一个大表 来解决多表关联时的全文索引问题. 下面以 为数据库现有 ...
- C++ 可配置的类工厂
项目中常用到工厂模式,工厂模式可以把创建对象的具体细节封装到Create函数中,减少重复代码,增强可读和可维护性.传统的工厂实现如下: class Widget { public: virtual i ...
- Android请求网络共通类——Hi_博客 Android App 开发笔记
今天 ,来分享一下 ,一个博客App的开发过程,以前也没开发过这种类型App 的经验,求大神们轻点喷. 首先我们要创建一个Andriod 项目 因为要从网络请求数据所以我们先来一个请求网络的共通类. ...
- ASP.NET MVC with Entity Framework and CSS一书翻译系列文章之第二章:利用模型类创建视图、控制器和数据库
在这一章中,我们将直接进入项目,并且为产品和分类添加一些基本的模型类.我们将在Entity Framework的代码优先模式下,利用这些模型类创建一个数据库.我们还将学习如何在代码中创建数据库上下文类 ...
- ASP.NET Core 折腾笔记二:自己写个完整的Cache缓存类来支持.NET Core
背景: 1:.NET Core 已经没System.Web,也木有了HttpRuntime.Cache,因此,该空间下Cache也木有了. 2:.NET Core 有新的Memory Cache提供, ...
- .NET Core中间件的注册和管道的构建(2)---- 用UseMiddleware扩展方法注册中间件类
.NET Core中间件的注册和管道的构建(2)---- 用UseMiddleware扩展方法注册中间件类 0x00 为什么要引入扩展方法 有的中间件功能比较简单,有的则比较复杂,并且依赖其它组件.除 ...
- Java基础Map接口+Collections工具类
1.Map中我们主要讲两个接口 HashMap 与 LinkedHashMap (1)其中LinkedHashMap是有序的 怎么存怎么取出来 我们讲一下Map的增删改查功能: /* * Ma ...
- PHP-解析验证码类--学习笔记
1.开始 在 网上看到使用PHP写的ValidateCode生成验证码码类,感觉不错,特拿来分析学习一下. 2.类图 3.验证码类部分代码 3.1 定义变量 //随机因子 private $char ...
随机推荐
- codevs 1576最长严格上升子序列
传送门 1576 最长严格上升子序列 时间限制: 1 s 空间限制: 256000 KB 题目等级 : 黄金 Gold 题目描述 Description 给一个数组a1, a2 ... an ...
- 一般项目转为Maven项目所遇到的问题
最近搞CI,准备使用Maven,但以前的项目不是Maven项目,需要把项目转换为Maven项目.这遇到几个小问题,一是jar包的依赖,二是从本地仓库取出依赖jar包. 由于没有本地仓库,要手动添加ja ...
- 什么是BI(Business Intelligence)
一.BI的定义 BI是Business Intelligence的英文缩写,中文解释为商务智能,用来帮助企业更好地利用数据提高决策质量的技术集合,是从大量的数据中钻取信息与知识的过程.简单讲就是业务. ...
- Unity3d笔记
当变量重命名后,已序列化保存的值会丢失,如果希望继续保留其数值,可使用FormerlySerializedAs,如下代码所示: [UnityEngine.Serialization.FormerlyS ...
- Struts2 + easyui的DataGrid 分页
jsp页面 js代码: $(function() { $('#ff').hide(); $('#tt').datagrid({ title : '信息显示', iconCls : 'icon-save ...
- gin-swagger包Api文档生成, Post请求参数无法接收, 问题修复。
Bug描述 FormData方式下,任意参数类型都只生成file参数类型. 问题重现 问题代码在这一行 github.com\swaggo\swag\operation.go : 131 line c ...
- Eclipse与IntelliJ IDEA区别
1.没有workspace,新增modules(Workspace-Project,Project-Module) 2.没有perspectives,自动根据上下文调用相关工具 3.没有保存按钮,自动 ...
- 结合webpack 一步一步实现懒加载的国际化简易版方案
想法来源于工作需求,最后倒腾出一个国际化工作流,期望是这样的: 1. 自动化处理国际化文案(表现为转义翻译人员给到的文件处理成技术人员所识别的文件) 2. 转化后的国际化文件能够按需加载,实现懒加载的 ...
- u17 u18共存
公司用的Unity版本是2017版本的,由于需要尝试一些实验性的新功能,我就安装了Unity2018版本,结果发现Unity2018版本破解之后,Unity2017版本不能用了.那么怎么解决两个版本的 ...
- Spark HA 配置中spark.deploy.zookeeper.url 的意思
Spark HA的配置网上很多,最近我在看王林的Spark的视频,要付费的.那个人牛B吹得很大,本事应该是有的,但是有本事,不一定就是好老师.一开始吹中国第一,吹着吹着就变成世界第一.就算你真的是世界 ...