HubbleDotNet usage class
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Hubble.SQLClient;
using System.Configuration;
using RJ.Entity.UI_A;
using System.Data;
using Hubble.Core.Analysis.HighLight;
using Hubble.Analyzer;
using RJ.DBConnection;
using RJ.DALS.UI_A;
using RJ.Common;
using RJ.Entity.UI_A.AboutRJ;
namespace RJ.DAL.UI_A
{
public class DHubble : UI_A_DALBaseClass
{
private static string _TitleAnalyzerName = null;
private static string _ContentAnalyzerName = null;
private const int CacheTimeout = 0; //In seconds
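/// <summary>
/// Looks up the analyzer names configured for the Title and Content columns of the given
/// table (via the SP_Columns stored procedure) and caches them in the static fields above.
/// </summary>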
public static void GetAnalyzerName(HubbleAsyncConnection conn, string tableName)
{
if (_TitleAnalyzerName != null && _ContentAnalyzerName != null)
{
return;
}
string sql = string.Format("exec SP_Columns '{0}'", tableName.Replace("'", "''"));
HubbleCommand cmd = new HubbleCommand(sql, conn);
foreach (System.Data.DataRow row in cmd.Query().Tables[0].Rows)
{
if (row["FieldName"].ToString().Equals("Title", StringComparison.CurrentCultureIgnoreCase))
{
_TitleAnalyzerName = row["Analyzer"].ToString();
}
if (row["FieldName"].ToString().Equals("Content", StringComparison.CurrentCultureIgnoreCase))
{
_ContentAnalyzerName = row["Analyzer"].ToString();
}
}
}
/// <summary>
/// Knowledge search list
/// </summary>
/// <param name="pageindex">page index</param>
/// <param name="pagesize">number of records per page</param>
/// <param name="key">search keyword, passed as "keyword_sortFlag"</param>
/// <param name="Count">total number of matching records</param>
/// <returns>the matching articles for the requested page</returns>
public List<EArticle> SearchKnowList(int pageindex, int pagesize, string key, out int Count)
{
string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
DataSet ds = new DataSet();
System.Diagnostics.Stopwatch sw = new System.Diagnostics.Stopwatch();
sw.Start();
using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
{
conn.Open();
GetAnalyzerName(conn, "KnowSearch");
string wordssplitbyspace;
HubbleCommand matchCmd = new HubbleCommand(conn);
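// Ask the HubbleDotNet server to tokenize the keyword with the analyzer configured for the
// documentname field; only the part of key before '_' is used as the keyword (the suffix
// was consumed by the sort branches that are commented out below).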
string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("KnowSearch",
"documentname", (key.Split('_')[0]).ToString(), int.MaxValue, out wordssplitbyspace);
HubbleDataAdapter adapter = new HubbleDataAdapter();
//if (key.Split('_')[1] == "1")
//{
// adapter.SelectCommand = new HubbleCommand("select between @begin to @end * from KnowSearch where ( documentname Contains @matchString or documentname match @matchString ) order by score desc",
// conn);
//}
//else if (key.Split('_')[1] == "2")
//{
// adapter.SelectCommand = new HubbleCommand("select between @begin to @end ID,UNIID,DocumentName,publishTime from KnowSearch where ( documentname Contains @matchString or documentname match @matchString ) order by publishTime desc",
// conn);
//}
//else
//{
adapter.SelectCommand = new HubbleCommand("select between @begin to @end ID,UNIID,DocumentName,publishTime from KnowSearch where ( documentname Contains @matchString or documentname match @matchString ) order by score desc,publishTime desc",
conn);
//}
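// HubbleDotNet paging: "select between @begin to @end ..." returns the rows from @begin
// through @end of the result set (0-based, inclusive).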
adapter.SelectCommand.Parameters.Add("@begin", (pagesize - 1) * pageindex);
adapter.SelectCommand.Parameters.Add("@end", pagesize * pageindex - 1);
adapter.SelectCommand.Parameters.Add("@matchString", matchString);
adapter.SelectCommand.CacheTimeout = CacheTimeout;
ds = new System.Data.DataSet();
//adapter.Fill(ds);
HubbleCommand cmd = adapter.SelectCommand;
ds = cmd.Query(CacheTimeout);
//titleWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Title", docids, int.MaxValue);
//contentWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Content", docids, int.MaxValue);
}
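// HubbleDotNet reports the total number of matching records in DataTable.MinimumCapacity.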
Count = ds.Tables[0].MinimumCapacity;
List<EArticle> result = new List<EArticle>();
foreach (System.Data.DataRow row in ds.Tables[0].Rows)
{
EArticle document = new EArticle();
document.ID = int.Parse(row["ID"].ToString());
document.UNIID = row["UNIID"].ToString();
document.Name = row["DocumentName"].ToString();
if (!string.IsNullOrEmpty(row["publishTime"].ToString().Trim()))
{
document.PublishTime = Convert.ToDateTime(row["publishTime"].ToString());
}
else
{
document.PublishTime = Convert.ToDateTime("9999-12-31");
}
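// Highlight the matched keywords in the title with the PanGu analyzer, wrapping hits in
// red <font> tags and capping each fragment at 100 characters.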
SimpleHTMLFormatter simpleHTMLFormatter =
new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
Highlighter titleHighlighter;
Highlighter contentHighlighter;
titleHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
contentHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
titleHighlighter.FragmentSize = 100;
contentHighlighter.FragmentSize = 100;
string lightname = titleHighlighter.GetBestFragment((key.Split('_')[0]).ToString(), document.Name);
if (string.IsNullOrEmpty(lightname))
{
document.TitleHighLighter = document.Name;
}
else
{
document.TitleHighLighter = lightname;
}
result.Add(document);
}
sw.Stop();
return result;
}
/// <summary>
/// Document search list
/// </summary>
/// <param name="pageindex">page index</param>
/// <param name="pagesize">number of records per page</param>
/// <param name="key">search keyword</param>
/// <param name="isLevel">comma-separated access levels the caller may see; empty means public only</param>
/// <param name="Count">total number of matching records</param>
/// <returns>the matching documents for the requested page</returns>
public List<EArticle> SearchDocList(int pageindex, int pagesize, string key, string isLevel, out int Count)
{
string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
DataSet ds = new DataSet();
//string sql = "select * from News where Title match '" + key + "'";
//ds = SQLHelper.ExecuteDataset(strConn, CommandType.Text, sql, null);
System.Diagnostics.Stopwatch sw = new System.Diagnostics.Stopwatch();
sw.Start();
using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
{
conn.Open();
GetAnalyzerName(conn, "DocumentSearch");
string wordssplitbyspace;
HubbleCommand matchCmd = new HubbleCommand(conn);
string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("DocumentSearch",
"documentname", key, int.MaxValue, out wordssplitbyspace);
HubbleDataAdapter adapter = new HubbleDataAdapter();
StringBuilder strSql = new StringBuilder();
string[] levelPara;
if (string.IsNullOrEmpty(isLevel))
{
strSql.Append("select between @begin to @end id,UNIID,documentname,htmlurl,publishtime,addtime,SEO_D,PDF from DocumentSearch where ( documentname Contains @matchString or Content Contains @matchString or documentname match @matchString or Content match @matchString ) and VisitLevel ='' and release='true' order by score desc");
}
else
{
levelPara = isLevel.Split(',');
strSql.Append("select between @begin to @end id,UNIID,documentname,htmlurl,publishtime,addtime,SEO_D,PDF from DocumentSearch where ( documentname Contains @matchString or Content Contains @matchString or documentname match @matchString or Content match @matchString ) and (");
for (int i = 0; i < levelPara.Length; i++)
{
strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
}
strSql.Remove(strSql.Length - 3, 2);
strSql.Append(" or VisitLevel='') and release='true' order by score desc");
}
adapter.SelectCommand = new HubbleCommand(strSql.ToString(), conn);
adapter.SelectCommand.Parameters.Add("@begin", (pagesize - 1) * pageindex);
adapter.SelectCommand.Parameters.Add("@end", pagesize * pageindex - 1);
adapter.SelectCommand.Parameters.Add("@matchString", matchString);
if (!string.IsNullOrEmpty(isLevel))
{
for (int i = 0; i < isLevel.Split(',').Length; i++)
{
adapter.SelectCommand.Parameters.Add("@VisitLevel" + i, isLevel.Split(',')[i].Replace("'", ""));
}
}
adapter.SelectCommand.CacheTimeout = CacheTimeout;
ds = new System.Data.DataSet();
//adapter.Fill(ds);
HubbleCommand cmd = adapter.SelectCommand;
ds = cmd.Query(CacheTimeout);
//titleWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Title", docids, int.MaxValue);
//contentWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Content", docids, int.MaxValue);
}
Count = ds.Tables[0].MinimumCapacity;
List<EArticle> result = new List<EArticle>();
foreach (System.Data.DataRow row in ds.Tables[0].Rows)
{
EArticle document = new EArticle();
document.ID = int.Parse(row["ID"].ToString());
document.UNIID = row["UNIID"].ToString();
document.Name = row["DocumentName"].ToString();
document.htmlURL = row["htmlURL"].ToString();
document.SEO_D = row["SEO_D"].ToString();
document.PDF = row["PDF"].ToString();
if (!string.IsNullOrEmpty(row["publishTime"].ToString().Trim()))
{
document.PublishTime = Convert.ToDateTime(row["publishTime"].ToString());
}
else
{
document.PublishTime = Convert.ToDateTime(row["AddTime"].ToString());
}
SimpleHTMLFormatter simpleHTMLFormatter =
new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
Highlighter titleHighlighter;
Highlighter contentHighlighter;
titleHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
contentHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
titleHighlighter.FragmentSize = 100;
contentHighlighter.FragmentSize = 100;
string lightname = titleHighlighter.GetBestFragment(key, document.Name);
if (string.IsNullOrEmpty(lightname))
{
document.TitleHighLighter = document.Name;
}
else
{
document.TitleHighLighter = lightname;
}
result.Add(document);
}
sw.Stop();
return result;
}
/// <summary>
/// Software search list
/// </summary>
/// <param name="pageindex">page index</param>
/// <param name="pagesize">number of records per page</param>
/// <param name="key">search keyword</param>
/// <param name="isLevel">comma-separated access levels the caller may see; empty means public only</param>
/// <param name="Count">total number of matching records</param>
/// <returns>the matching software entries for the requested page</returns>
public List<EArticle> SearchSoftWareList(int pageindex, int pagesize, string key, string isLevel, out int Count)
{
string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
DataSet ds = new DataSet();
//string sql = "select * from News where Title match '" + key + "'";
//ds = SQLHelper.ExecuteDataset(strConn, CommandType.Text, sql, null);
System.Diagnostics.Stopwatch sw = new System.Diagnostics.Stopwatch();
sw.Start();
using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
{
conn.Open();
GetAnalyzerName(conn, "SoftwareSearch");
string wordssplitbyspace;
HubbleCommand matchCmd = new HubbleCommand(conn);
string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("SoftwareSearch",
"DocumentName", key, int.MaxValue, out wordssplitbyspace);
HubbleDataAdapter adapter = new HubbleDataAdapter();
StringBuilder strSql = new StringBuilder();
string[] levelPara;
if (string.IsNullOrEmpty(isLevel))
{
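// No explicit access levels: show only released entries that are public (VisitLevel='')
// or flagged with the JingBiaoLevel access level.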
strSql.Append("select between @begin to @end id,documentname,publishtime,addtime,UNIID,IsCheckPartner from SoftwareSearch where (DocumentName Contains @matchString or DocumentName match @matchString) and (VisitLevel ='' or visitlevel='JingBiaoLevel') and release='true' order by score desc");
}
else
{
levelPara = isLevel.Split(',');
strSql.Append("select between @begin to @end id,documentname,publishtime,addtime,UNIID,IsCheckPartner from SoftwareSearch where (DocumentName Contains @matchString or DocumentName match @matchString) and (");
for (int i = 0; i < levelPara.Length; i++)
{
strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
}
strSql.Remove(strSql.Length - 3, 2);
strSql.Append(" or VisitLevel='' or visitlevel='JingBiaoLevel') and release='true' order by score desc");
}
adapter.SelectCommand = new HubbleCommand(strSql.ToString(), conn);
adapter.SelectCommand.Parameters.Add("@begin", (pagesize - 1) * pageindex);
adapter.SelectCommand.Parameters.Add("@end", pagesize * pageindex - 1);
adapter.SelectCommand.Parameters.Add("@matchString", matchString);
if (!string.IsNullOrEmpty(isLevel))
{
for (int i = 0; i < isLevel.Split(',').Length; i++)
{
adapter.SelectCommand.Parameters.Add("@VisitLevel" + i, isLevel.Split(',')[i].Replace("'", ""));
}
}
adapter.SelectCommand.CacheTimeout = CacheTimeout;
ds = new System.Data.DataSet();
//adapter.Fill(ds);
HubbleCommand cmd = adapter.SelectCommand;
ds = cmd.Query(CacheTimeout);
//titleWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Title", docids, int.MaxValue);
//contentWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Content", docids, int.MaxValue);
}
Count = ds.Tables[0].MinimumCapacity;
List<EArticle> result = new List<EArticle>();
foreach (System.Data.DataRow row in ds.Tables[0].Rows)
{
EArticle software = new EArticle();
software.ID = int.Parse(row["ID"].ToString());
software.Name = row["DocumentName"].ToString();
software.UpdateTime = Convert.ToDateTime(row["AddTime"].ToString());
software.UNIID = row["UNIID"].ToString();
software.IsCheckPartner = Convert.ToBoolean(row["IsCheckPartner"]);
if (!string.IsNullOrEmpty(row["publishTime"].ToString().Trim()))
{
software.PublishTime = Convert.ToDateTime(row["publishTime"].ToString());
}
else
{
software.PublishTime = Convert.ToDateTime(row["Addtime"].ToString());
}
SimpleHTMLFormatter simpleHTMLFormatter =
new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
Highlighter titleHighlighter;
Highlighter contentHighlighter;
titleHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
contentHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
titleHighlighter.FragmentSize = 100;
contentHighlighter.FragmentSize = 100;
software.TitleHighLighter = titleHighlighter.GetBestFragment(key, software.Name);
if (string.IsNullOrEmpty(software.TitleHighLighter))
{
software.TitleHighLighter = software.Name;
}
result.Add(software);
}
sw.Stop();
return result;
}
/// <summary>
/// Site-wide search
/// </summary>
/// <param name="pagesize">number of records per page; 0 returns all matching rows</param>
/// <param name="pageindex">page index</param>
/// <param name="key">search keyword</param>
/// <param name="isLevel">comma-separated access levels the caller may see; empty means public only</param>
/// <param name="typeid">content type filter; 0 or less searches all types and also returns per-type counts</param>
/// <param name="Count">total number of matching records</param>
/// <param name="dtcount">per-TypeID hit counts produced by the GroupBy directive; null when a type filter is applied</param>
/// <returns>the matching records for the requested page</returns>
public List<ESearch> SearchAllList(int pagesize, int pageindex, string key, string isLevel, int typeid, out int Count, out DataTable dtcount)
{
string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
DataSet ds = new DataSet();
using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
{
conn.Open();
GetAnalyzerName(conn, "WholeSearch_CN");
string wordssplitbyspace;
HubbleCommand matchCmd = new HubbleCommand(conn);
string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("WholeSearch_CN",
"Name", key, int.MaxValue, out wordssplitbyspace);
HubbleDataAdapter adapter = new HubbleDataAdapter();
StringBuilder strSql = new StringBuilder();
string[] levelPara;
strSql.Append(" select ");
if (pagesize != 0)
{
strSql.Append(" between @begin to @end ");
}
strSql.Append("ID,Name,TypeID,SEO_D,TypeName,strURL,addTime from ");
strSql.Append(" WholeSearch_CN where ( Name Contains @matchString or Name Match @matchString or SEO_D Contains @matchString or SEO_D Match @matchString) ");
if (string.IsNullOrEmpty(isLevel))
{
strSql.Append(" and (VisitLevel ='' or visitlevel='JingBiaoLevel')");
}
else
{
levelPara = isLevel.Split(',');
strSql.Append(" and (");
for (int i = 0; i < levelPara.Length; i++)
{
strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
}
strSql.Remove(strSql.Length - 3, 2);
strSql.Append(" or VisitLevel='' or visitlevel='JingBiaoLevel') ");
}
if (typeid > 0)
{
strSql.Append(" and typeid=@typeid");
}
else
{
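// No type filter: ask HubbleDotNet for per-TypeID counts as well, which come back as a
// second table in the result set (read into dtcount below).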
strSql.Insert(0, " [GroupBy('Count', 'ID', 'TypeID', 10)] ");
}
strSql.Append(" order by score desc, lorder");
adapter.SelectCommand = new HubbleCommand(strSql.ToString(), conn);
adapter.SelectCommand.Parameters.Add("@begin", (pageindex - 1) * pagesize);
adapter.SelectCommand.Parameters.Add("@end", pageindex * pagesize - 1);
adapter.SelectCommand.Parameters.Add("@matchString", matchString);
adapter.SelectCommand.Parameters.Add("@typeid", typeid);
if (!string.IsNullOrEmpty(isLevel))
{
for (int i = 0; i < isLevel.Split(',').Length; i++)
{
adapter.SelectCommand.Parameters.Add("@VisitLevel" + i, isLevel.Split(',')[i].Replace("'", ""));
}
}
adapter.SelectCommand.CacheTimeout = CacheTimeout;
ds = new System.Data.DataSet();
//adapter.Fill(ds);
HubbleCommand cmd = adapter.SelectCommand;
ds = cmd.Query(CacheTimeout);
//titleWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Title", docids, int.MaxValue);
//contentWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Content", docids, int.MaxValue);
}
Count = ds.Tables[0].MinimumCapacity;
if (ds.Tables.Count > 1)
{
dtcount = ds.Tables[1];
}
else
{
dtcount = null;
}
List<ESearch> result = new List<ESearch>();
foreach (System.Data.DataRow row in ds.Tables[0].Rows)
{
ESearch search = new ESearch();
search.ID = int.Parse(row["ID"].ToString());
//search.UNIID = new Guid(row["UNIID"].ToString());
search.Name = Strings.NoHTML(row["Name"].ToString());
search.SEO_D = row["SEO_D"].ToString();
search.strURL = row["strURL"].ToString();
if (row["TypeID"].ToString() != "")
{
search.TypeID = int.Parse(row["TypeID"].ToString());
search.TypeName = row["TypeName"].ToString();
}
if (!string.IsNullOrEmpty(row["addTime"].ToString().Trim()))
{
search.addTime = Convert.ToDateTime(row["addTime"].ToString());
}
else
{
search.addTime = Convert.ToDateTime("9999-12-31");
}
SimpleHTMLFormatter simpleHTMLFormatter =
new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
Highlighter titleHighlighter;
Highlighter contentHighlighter;
titleHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
contentHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
titleHighlighter.FragmentSize = 100;
contentHighlighter.FragmentSize = 100;
search.Abstract = contentHighlighter.GetBestFragment(key, search.SEO_D);
if (string.IsNullOrEmpty(search.Abstract))
{
search.Abstract = search.SEO_D;
}
search.TitleHighLighter = titleHighlighter.GetBestFragment(key, search.Name);
if (string.IsNullOrEmpty(search.TitleHighLighter))
{
search.TitleHighLighter = search.Name;
}
result.Add(search);
}
return result;
}
/// <summary>
/// English document search list
/// </summary>
/// <param name="pageindex">page index</param>
/// <param name="pagesize">number of records per page</param>
/// <param name="key">search keyword</param>
/// <param name="isLevel">comma-separated access levels the caller may see; empty means public only</param>
/// <param name="Count">total number of matching records</param>
/// <returns>the matching documents for the requested page</returns>
public List<EArticle> EN_SearchDocList(int pageindex, int pagesize, string key, string isLevel, out int Count)
{
string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
DataSet ds = new DataSet();
//string sql = "select * from News where Title match '" + key + "'";
//ds = SQLHelper.ExecuteDataset(strConn, CommandType.Text, sql, null);
System.Diagnostics.Stopwatch sw = new System.Diagnostics.Stopwatch();
sw.Start();
using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
{
conn.Open();
GetAnalyzerName(conn, "DocumentSearch_EN");
string wordssplitbyspace;
HubbleCommand matchCmd = new HubbleCommand(conn);
string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("DocumentSearch_EN",
"documentname", key, int.MaxValue, out wordssplitbyspace);
HubbleDataAdapter adapter = new HubbleDataAdapter();
StringBuilder strSql = new StringBuilder();
string[] levelPara;
if (string.IsNullOrEmpty(isLevel))
{
strSql.Append("select between @begin to @end ID,UNIID,DocumentName,PDF,SEO_D,publishTime,addTime from DocumentSearch_EN where ( documentname Contains @matchString or documentname match @matchString ) and VisitLevel ='' and release='true' order by score desc");
}
else
{
levelPara = isLevel.Split(',');
strSql.Append("select between @begin to @end ID,UNIID,DocumentName,PDF,SEO_D,publishTime,addTime from DocumentSearch_EN where ( documentname Contains @matchString or documentname match @matchString or SEO_K Contains @matchString or SEO_K match @matchString ) and (");
for (int i = 0; i < levelPara.Length; i++)
{
strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
}
strSql.Remove(strSql.Length - 3, 2);
strSql.Append(" or VisitLevel='') and release='true' order by score desc");
}
adapter.SelectCommand = new HubbleCommand(strSql.ToString(), conn);
adapter.SelectCommand.Parameters.Add("@begin", (pagesize - 1) * pageindex);
adapter.SelectCommand.Parameters.Add("@end", pagesize * pageindex - 1);
adapter.SelectCommand.Parameters.Add("@matchString", matchString);
if (!string.IsNullOrEmpty(isLevel))
{
for (int i = 0; i < isLevel.Split(',').Length; i++)
{
adapter.SelectCommand.Parameters.Add("@VisitLevel" + i, isLevel.Split(',')[i].Replace("'", ""));
}
}
adapter.SelectCommand.CacheTimeout = CacheTimeout;
ds = new System.Data.DataSet();
//adapter.Fill(ds);
HubbleCommand cmd = adapter.SelectCommand;
ds = cmd.Query(CacheTimeout);
//titleWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Title", docids, int.MaxValue);
//contentWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Content", docids, int.MaxValue);
}
Count = ds.Tables[0].MinimumCapacity;
List<EArticle> result = new List<EArticle>();
foreach (System.Data.DataRow row in ds.Tables[0].Rows)
{
EArticle document = new EArticle();
document.ID = int.Parse(row["ID"].ToString());
document.UNIID = row["UNIID"].ToString();
document.Name = row["DocumentName"].ToString();
document.SEO_D = row["SEO_D"].ToString();
document.PDF = row["PDF"].ToString();
if (!string.IsNullOrEmpty(row["publishTime"].ToString().Trim()))
{
document.PublishTime = Convert.ToDateTime(row["publishTime"].ToString());
}
else
{
document.PublishTime = Convert.ToDateTime(row["addTime"].ToString());
}
SimpleHTMLFormatter simpleHTMLFormatter =
new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
Highlighter titleHighlighter;
Highlighter contentHighlighter;
titleHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
contentHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
titleHighlighter.FragmentSize = 100;
contentHighlighter.FragmentSize = 100;
string lightname = titleHighlighter.GetBestFragment(key, document.Name);
if (string.IsNullOrEmpty(lightname))
{
document.TitleHighLighter = document.Name;
}
else
{
document.TitleHighLighter = lightname;
}
result.Add(document);
}
sw.Stop();
return result;
}
/// <summary>
/// English site-wide search
/// </summary>
/// <param name="pagesize">number of records per page; 0 returns all matching rows</param>
/// <param name="pageindex">page index</param>
/// <param name="key">search keyword</param>
/// <param name="isLevel">comma-separated access levels the caller may see; empty means public only</param>
/// <param name="typeid">content type filter; 0 or less searches all types</param>
/// <param name="Count">total number of matching records</param>
/// <returns>the matching records for the requested page</returns>
public List<ESearch> EN_SearchAllList(int pagesize, int pageindex, string key, string isLevel, int typeid, out int Count)
{
string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
DataSet ds = new DataSet();
using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
{
conn.Open();
GetAnalyzerName(conn, "WholeSearch_EN");
string wordssplitbyspace;
HubbleCommand matchCmd = new HubbleCommand(conn);
string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("WholeSearch_EN",
"Name", key, int.MaxValue, out wordssplitbyspace);
HubbleDataAdapter adapter = new HubbleDataAdapter();
StringBuilder strSql = new StringBuilder();
string[] levelPara;
if (string.IsNullOrEmpty(isLevel))
{
if (pagesize == 0)
{
strSql.Append("select ID,Name,TypeID,SEO_D,TypeName,addTime from ");
}
else
{
strSql.Append("select between @begin to @end ID,Name,TypeID,SEO_D,TypeName,addTime from ");
}
strSql.Append(" WholeSearch_EN where ( Name Contains @matchString or Name Match @matchString or SEO_D Contains @matchString or SEO_D Match @matchString) and VisitLevel =''");
if (typeid > 0)
{
strSql.Append(" and typeid=@typeid");
}
strSql.Append(" order by score desc, lorder");
}
else
{
levelPara = isLevel.Split(',');
if (pagesize == 0)
{
strSql.Append("select ID,Name,TypeID,SEO_D,TypeName,addTime from ");
}
else
{
strSql.Append("select between @begin to @end ID,Name,TypeID,SEO_D,TypeName,addTime from ");
}
strSql.Append(" WholeSearch_EN where ( Name Contains @matchString or Name Match @matchString or SEO_D Contains @matchString or SEO_D Match @matchString) and (");
for (int i = 0; i < levelPara.Length; i++)
{
strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
}
strSql.Remove(strSql.Length - 3, 2);
strSql.Append(" or VisitLevel='') ");
if (typeid > 0)
{
strSql.Append(" and typeid=@typeid");
}
strSql.Append(" order by score desc, lorder");
}
adapter.SelectCommand = new HubbleCommand(strSql.ToString(), conn);
adapter.SelectCommand.Parameters.Add("@begin", (pageindex - 1) * pagesize);
adapter.SelectCommand.Parameters.Add("@end", pageindex * pagesize - 1);
adapter.SelectCommand.Parameters.Add("@matchString", matchString);
adapter.SelectCommand.Parameters.Add("@typeid", typeid);
if (!string.IsNullOrEmpty(isLevel))
{
for (int i = 0; i < isLevel.Split(',').Length; i++)
{
adapter.SelectCommand.Parameters.Add("@VisitLevel" + i, isLevel.Split(',')[i].Replace("'", ""));
}
}
adapter.SelectCommand.CacheTimeout = CacheTimeout;
ds = new System.Data.DataSet();
//adapter.Fill(ds);
HubbleCommand cmd = adapter.SelectCommand;
ds = cmd.Query(CacheTimeout);
//titleWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Title", docids, int.MaxValue);
//contentWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Content", docids, int.MaxValue);
}
Count = ds.Tables[0].MinimumCapacity;
List<ESearch> result = new List<ESearch>();
foreach (System.Data.DataRow row in ds.Tables[0].Rows)
{
ESearch search = new ESearch();
search.ID = int.Parse(row["ID"].ToString());
//search.UNIID = new Guid(row["UNIID"].ToString());
search.Name = Strings.NoHTML(row["Name"].ToString());
search.SEO_D = row["SEO_D"].ToString();
//search.content = Strings.NoHTML(row["content"].ToString());
search.TypeID = int.Parse(row["TypeID"].ToString());
search.TypeName = row["TypeName"].ToString();
//search.TXT = Strings.NoHTML(row["TXT"].ToString());
if (!string.IsNullOrEmpty(row["addTime"].ToString().Trim()))
{
search.addTime = Convert.ToDateTime(row["addTime"].ToString());
}
else
{
search.addTime = Convert.ToDateTime("9999-12-31");
}
SimpleHTMLFormatter simpleHTMLFormatter =
new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
Highlighter titleHighlighter;
Highlighter contentHighlighter;
titleHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
contentHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
titleHighlighter.FragmentSize = 100;
contentHighlighter.FragmentSize = 100;
search.Abstract = contentHighlighter.GetBestFragment(key, search.SEO_D);
if (string.IsNullOrEmpty(search.Abstract))
{
search.Abstract = search.SEO_D;
}
search.TitleHighLighter = titleHighlighter.GetBestFragment(key, search.Name);
if (string.IsNullOrEmpty(search.TitleHighLighter))
{
search.TitleHighLighter = search.Name;
}
result.Add(search);
}
return result;
}
/// <summary>
/// Chinese document search list
/// </summary>
/// <param name="pageindex">page index</param>
/// <param name="pagesize">number of records per page</param>
/// <param name="key">search keyword, passed as "keyword_sortFlag"</param>
/// <param name="isLevel">comma-separated access levels the caller may see; empty means public only</param>
/// <param name="Count">total number of matching records</param>
/// <returns>the matching documents for the requested page</returns>
public List<EArticle> SearchDocListNew(int pageindex, int pagesize, string key, string isLevel, out int Count)
{
string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
DataSet ds = new DataSet();
//string sql = "select * from News where Title match '" + key + "'";
//ds = SQLHelper.ExecuteDataset(strConn, CommandType.Text, sql, null);
System.Diagnostics.Stopwatch sw = new System.Diagnostics.Stopwatch();
sw.Start();
using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
{
conn.Open();
GetAnalyzerName(conn, "DocumentSearch");
string wordssplitbyspace;
HubbleCommand matchCmd = new HubbleCommand(conn);
string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("DocumentSearch",
"Documentname", (key.Split('_')[0]).ToString(), int.MaxValue, out wordssplitbyspace);
HubbleDataAdapter adapter = new HubbleDataAdapter();
StringBuilder strSql = new StringBuilder();
string[] levelPara;
//if (key.Split('_')[1] == "1")
//{
// if (string.IsNullOrEmpty(isLevel))
// {
// // strSql.Append("select between @begin to @end * from V_DocumentSearch where ( documentname^100 Contains @matchString or Content^50 Contains @matchString or documentname^100 match @matchString or Content^50 match @matchString ) and VisitLevel ='' and release='true' order by score desc");
// strSql.Append("select between @begin to @end ID,UNIID,DocumentName,SEO_D,PDF,htmlURL,gid_collection,publishTime,addTime from DocumentSearch where ( documentname Contains @matchString or documentname match @matchString ) and VisitLevel ='' and release='true' order by score desc");
// }
// else
// {
// levelPara = isLevel.Split(',');
// //strSql.Append("select between @begin to @end * from V_DocumentSearch where ( documentname^100 Contains @matchString or Content^50 Contains @matchString or documentname^100 match @matchString or Content^50 match @matchString ) and (");
// strSql.Append("select between @begin to @end ID,UNIID,DocumentName,SEO_D,PDF,htmlURL,gid_collection,publishTime,addTime from DocumentSearch where ( documentname Contains @matchString or documentname match @matchString ) and (");
// for (int i = 0; i < levelPara.Length; i++)
// {
// strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
// }
// strSql.Remove(strSql.Length - 3, 2);
// strSql.Append(" or VisitLevel='') and release='true' order by score desc");
// }
//}
//else if (key.Split('_')[1] == "2")
//{
// if (string.IsNullOrEmpty(isLevel))
// {
// //strSql.Append("select between @begin to @end * from V_DocumentSearch where ( documentname^100 Contains @matchString or Content^50 Contains @matchString or documentname^100 match @matchString or Content^50 match @matchString ) and VisitLevel ='' and release='true' order by publishTime desc");
// strSql.Append("select between @begin to @end ID,UNIID,DocumentName,SEO_D,PDF,htmlURL,gid_collection,publishTime,addTime from DocumentSearch where ( documentname Contains @matchString or documentname match @matchString ) and VisitLevel ='' and release='true' order by publishTime desc");
// }
// else
// {
// levelPara = isLevel.Split(',');
// //strSql.Append("select between @begin to @end * from V_DocumentSearch where ( documentname^100 Contains @matchString or Content^50 Contains @matchString or documentname^100 match @matchString or Content^50 match @matchString ) and (");
// strSql.Append("select between @begin to @end ID,UNIID,DocumentName,SEO_D,PDF,htmlURL,gid_collection,publishTime,addTime from DocumentSearch where ( documentname Contains @matchString or documentname match @matchString ) and (");
// for (int i = 0; i < levelPara.Length; i++)
// {
// strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
// }
// strSql.Remove(strSql.Length - 3, 2);
// strSql.Append(" or VisitLevel='') and release='true' order by publishTime desc");
// }
//}
//else
//{
if (string.IsNullOrEmpty(isLevel))
{
//strSql.Append("select between @begin to @end * from V_DocumentSearch where ( documentname^100 Contains @matchString or Content^50 Contains @matchString or documentname^100 match @matchString or Content^50 match @matchString ) and VisitLevel ='' and release='true' order by score desc, publishTime desc");
strSql.Append("select between @begin to @end ID,UNIID,DocumentName,SEO_D,PDF,htmlURL,gid_collection,publishTime,addTime from DocumentSearch where ( documentname Contains @matchString or documentname match @matchString ) and VisitLevel ='' and release='true' order by score desc, publishTime desc");
}
else
{
levelPara = isLevel.Split(',');
//strSql.Append("select between @begin to @end * from V_DocumentSearch where ( documentname^100 Contains @matchString or Content^50 Contains @matchString or documentname^100 match @matchString or Content^50 match @matchString ) and (");
strSql.Append("select between @begin to @end ID,UNIID,DocumentName,SEO_D,PDF,htmlURL,gid_collection,publishTime,addTime from DocumentSearch where ( documentname Contains @matchString or documentname match @matchString ) and (");
for (int i = 0; i < levelPara.Length; i++)
{
strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
}
strSql.Remove(strSql.Length - 3, 2);
strSql.Append(" or VisitLevel='') and release='true' order by score desc, publishTime desc ");
}
//}
adapter.SelectCommand = new HubbleCommand(strSql.ToString(), conn);
adapter.SelectCommand.Parameters.Add("@begin", (pagesize - 1) * pageindex);
adapter.SelectCommand.Parameters.Add("@end", pagesize * pageindex - 1);
adapter.SelectCommand.Parameters.Add("@matchString", matchString);
if (!string.IsNullOrEmpty(isLevel))
{
for (int i = 0; i < isLevel.Split(',').Length; i++)
{
adapter.SelectCommand.Parameters.Add("@VisitLevel" + i, isLevel.Split(',')[i].Replace("'", ""));
}
}
adapter.SelectCommand.CacheTimeout = CacheTimeout;
ds = new System.Data.DataSet();
//adapter.Fill(ds);
HubbleCommand cmd = adapter.SelectCommand;
ds = cmd.Query(CacheTimeout);
//titleWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Title", docids, int.MaxValue);
//contentWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Content", docids, int.MaxValue);
}
Count = ds.Tables[0].MinimumCapacity;
List<EArticle> result = new List<EArticle>();
foreach (System.Data.DataRow row in ds.Tables[0].Rows)
{
EArticle document = new EArticle();
document.ID = int.Parse(row["ID"].ToString());
document.UNIID = row["UNIID"].ToString();
document.Name = row["DocumentName"].ToString();
document.SEO_D = row["SEO_D"].ToString();
document.PDF = row["PDF"].ToString();
document.htmlURL = row["htmlURL"].ToString();
document.gid_collection = row["gid_collection"].ToString();
document.TitleHighLighter = row["DocumentName"].ToString();
if (!string.IsNullOrEmpty(row["publishTime"].ToString().Trim()))
{
document.PublishTime = Convert.ToDateTime(row["publishTime"].ToString());
}
else
{
document.PublishTime = Convert.ToDateTime(row["AddTime"].ToString());
}
SimpleHTMLFormatter simpleHTMLFormatter =
new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
Highlighter titleHighlighter;
Highlighter contentHighlighter;
titleHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
contentHighlighter =
new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
titleHighlighter.FragmentSize = 100;
contentHighlighter.FragmentSize = 100;
string lightname = titleHighlighter.GetBestFragment((key.Split('_')[0]).ToString(), document.Name);
if (string.IsNullOrEmpty(lightname))
{
document.TitleHighLighter = document.Name;
}
else
{
document.TitleHighLighter = lightname;
}
result.Add(document);
}
sw.Stop();
return result;
}
}
}
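
For reference, here is a minimal calling sketch. It assumes the application's config file exposes the "Search" connection string used above, that UI_A_DALBaseClass has a parameterless constructor, that EArticle lives in the RJ.Entity.UI_A namespace imported at the top, and that the page index is 1-based; none of this is confirmed by the class itself, so treat it as an illustration rather than the definitive calling convention.

using System;
using System.Collections.Generic;
using RJ.DAL.UI_A;
using RJ.Entity.UI_A;

class SearchDemo
{
    static void Main()
    {
        int total;
        DHubble dal = new DHubble();
        // "hubble_1": the part before '_' is the keyword; the suffix is currently ignored
        // because the alternative sort branches in SearchKnowList are commented out.
        List<EArticle> page = dal.SearchKnowList(1, 10, "hubble_1", out total);
        Console.WriteLine("total hits: " + total);
        foreach (EArticle article in page)
        {
            Console.WriteLine("{0}\t{1:yyyy-MM-dd}\t{2}", article.ID, article.PublishTime, article.TitleHighLighter);
        }
    }
}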