using System;

using System.Collections.Generic;

using System.Linq;

using System.Text;

using Hubble.SQLClient;

using System.Configuration;

using RJ.Entity.UI_A;

using System.Data;

using Hubble.Core.Analysis.HighLight;

using Hubble.Analyzer;

using RJ.DBConnection;

using RJ.DALS.UI_A;

using RJ.Common;

using RJ.Entity.UI_A.AboutRJ;

namespace RJ.DAL.UI_A

{

public class DHubble : UI_A_DALBaseClass

{

// Cached analyzer names for the "Title" and "Content" columns, resolved once per
// process by GetAnalyzerName. NOTE(review): the cache is not keyed by table name,
// so whichever table is queried first wins; no locking (benign race on first use).
private static string _TitleAnalyzerName = null;

private static string _ContentAnalyzerName = null;

// Result-cache lifetime handed to HubbleCommand.Query, in seconds (0 = no caching).
private const int CacheTimeout = 0; //In seconds

/// <summary>
/// Resolves and caches the analyzer names configured for the "Title" and
/// "Content" columns of <paramref name="tableName"/> via the Hubble
/// SP_Columns stored procedure. Subsequent calls are no-ops once both
/// names are cached (cache is process-wide, not per-table).
/// </summary>
/// <param name="conn">Open Hubble connection to query against.</param>
/// <param name="tableName">Hubble table whose column metadata is read.</param>
public static void GetAnalyzerName(HubbleAsyncConnection conn, string tableName)
{
    if (_TitleAnalyzerName != null && _ContentAnalyzerName != null)
    {
        return;
    }

    // Embedded quotes are doubled because the table name is interpolated
    // into the exec statement rather than passed as a parameter.
    string sql = string.Format("exec SP_Columns '{0}'", tableName.Replace("'", "''"));
    HubbleCommand cmd = new HubbleCommand(sql, conn);

    foreach (System.Data.DataRow row in cmd.Query().Tables[0].Rows)
    {
        // Read the field name once; ordinal comparison is correct for
        // identifier matching (CA1309) and avoids culture surprises.
        string fieldName = row["FieldName"].ToString();
        if (fieldName.Equals("Title", StringComparison.OrdinalIgnoreCase))
        {
            _TitleAnalyzerName = row["Analyzer"].ToString();
        }
        else if (fieldName.Equals("Content", StringComparison.OrdinalIgnoreCase))
        {
            _ContentAnalyzerName = row["Analyzer"].ToString();
        }
    }
}

/// <summary>
/// Knowledge-base search list (搜索列表): paged full-text search over the
/// KnowSearch table, ordered by relevance then publish time.
/// </summary>
/// <param name="pageindex">Page number. NOTE(review): the paging expressions
/// below use (pagesize-1)*pageindex, the reverse of the usual convention —
/// kept as-is; confirm the argument order callers actually pass.</param>
/// <param name="pagesize">Rows per page.</param>
/// <param name="key">Search key; only the text before the first '_' is used.</param>
/// <param name="Count">Total hit count reported by the server.</param>
/// <returns>One page of matching articles with highlighted titles.</returns>
public List<EArticle> SearchKnowList(int pageindex, int pagesize, string key, out int Count)
{
    string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
    DataSet ds;

    using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
    {
        conn.Open();
        GetAnalyzerName(conn, "KnowSearch");

        // Ask the server to tokenize the keyword with the table's analyzer so
        // Contains/Match receive a normalized, space-separated term string.
        string wordssplitbyspace;
        HubbleCommand matchCmd = new HubbleCommand(conn);
        string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("KnowSearch",
            "documentname", key.Split('_')[0], int.MaxValue, out wordssplitbyspace);

        HubbleCommand cmd = new HubbleCommand("select between @begin to @end ID,UNIID,DocumentName,publishTime from KnowSearch where ( documentname Contains @matchString or documentname match @matchString ) order by score desc,publishTime desc",
            conn);
        cmd.Parameters.Add("@begin", (pagesize - 1) * pageindex);
        cmd.Parameters.Add("@end", pagesize * pageindex - 1);
        cmd.Parameters.Add("@matchString", matchString);
        cmd.CacheTimeout = CacheTimeout;

        ds = cmd.Query(CacheTimeout);
    }

    // Hubble reports the total hit count through MinimumCapacity of the
    // first result table (presumably — TODO confirm against Hubble docs).
    Count = ds.Tables[0].MinimumCapacity;

    List<EArticle> result = new List<EArticle>();
    foreach (System.Data.DataRow row in ds.Tables[0].Rows)
    {
        EArticle document = new EArticle();
        document.ID = int.Parse(row["ID"].ToString());
        document.UNIID = row["UNIID"].ToString();
        document.Name = row["DocumentName"].ToString();

        // Missing publish time falls back to a far-future sentinel date.
        if (!string.IsNullOrEmpty(row["publishTime"].ToString().Trim()))
        {
            document.PublishTime = Convert.ToDateTime(row["publishTime"].ToString());
        }
        else
        {
            document.PublishTime = Convert.ToDateTime("9999-12-31");
        }

        // Wrap matched words in the title with red <font> markers; fall back
        // to the plain name when no fragment matched.
        SimpleHTMLFormatter simpleHTMLFormatter =
            new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
        Highlighter titleHighlighter =
            new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
        titleHighlighter.FragmentSize = 100;

        string lightname = titleHighlighter.GetBestFragment(key.Split('_')[0], document.Name);
        document.TitleHighLighter = string.IsNullOrEmpty(lightname) ? document.Name : lightname;

        result.Add(document);
    }

    return result;
}

/// <summary>
/// Document search list (文档搜索列表): paged full-text search over the
/// DocumentSearch table (title and content fields), filtered by visit level.
/// </summary>
/// <param name="pageindex">Page number. NOTE(review): paging math uses
/// (pagesize-1)*pageindex — the reverse of the usual convention; kept as-is,
/// confirm the argument order callers actually pass.</param>
/// <param name="pagesize">Rows per page.</param>
/// <param name="key">Raw search keyword.</param>
/// <param name="isLevel">Comma-separated visit levels the caller may see;
/// null/empty restricts the search to public (VisitLevel='') documents.</param>
/// <param name="Count">Total hit count reported by the server.</param>
/// <returns>One page of matching documents with highlighted titles.</returns>
public List<EArticle> SearchDocList(int pageindex, int pagesize, string key, string isLevel, out int Count)
{
    string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
    DataSet ds;

    using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
    {
        conn.Open();
        GetAnalyzerName(conn, "DocumentSearch");

        // Server-side tokenization of the keyword with the table's analyzer.
        string wordssplitbyspace;
        HubbleCommand matchCmd = new HubbleCommand(conn);
        string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("DocumentSearch",
            "documentname", key, int.MaxValue, out wordssplitbyspace);

        StringBuilder strSql = new StringBuilder();
        string[] levelPara = null;  // split once, reused for the parameter loop below

        if (string.IsNullOrEmpty(isLevel))
        {
            strSql.Append("select between @begin to @end id,UNIID,documentname,htmlurl,publishtime,addtime,SEO_D,PDF from DocumentSearch where ( documentname Contains @matchString or Content Contains @matchString or documentname match @matchString or Content match @matchString ) and VisitLevel ='' and release='true' order by score desc");
        }
        else
        {
            levelPara = isLevel.Split(',');
            strSql.Append("select between @begin to @end id,UNIID,documentname,htmlurl,publishtime,addtime,SEO_D,PDF from DocumentSearch where ( documentname Contains @matchString or Content Contains @matchString or documentname match @matchString or Content match @matchString ) and (");
            for (int i = 0; i < levelPara.Length; i++)
            {
                strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
            }
            // Drop the trailing "or", then always admit the public level.
            strSql.Remove(strSql.Length - 3, 2);
            strSql.Append(" or VisitLevel='') and release='true' order by score desc");
        }

        HubbleCommand cmd = new HubbleCommand(strSql.ToString(), conn);
        cmd.Parameters.Add("@begin", (pagesize - 1) * pageindex);
        cmd.Parameters.Add("@end", pagesize * pageindex - 1);
        cmd.Parameters.Add("@matchString", matchString);
        if (levelPara != null)
        {
            for (int i = 0; i < levelPara.Length; i++)
            {
                // Quotes stripped defensively; the value itself is parameterized.
                cmd.Parameters.Add("@VisitLevel" + i, levelPara[i].Replace("'", ""));
            }
        }
        cmd.CacheTimeout = CacheTimeout;

        ds = cmd.Query(CacheTimeout);
    }

    // Hubble reports the total hit count through MinimumCapacity of the
    // first result table (presumably — TODO confirm against Hubble docs).
    Count = ds.Tables[0].MinimumCapacity;

    List<EArticle> result = new List<EArticle>();
    foreach (System.Data.DataRow row in ds.Tables[0].Rows)
    {
        EArticle document = new EArticle();
        document.ID = int.Parse(row["ID"].ToString());
        document.UNIID = row["UNIID"].ToString();
        document.Name = row["DocumentName"].ToString();
        document.htmlURL = row["htmlURL"].ToString();
        document.SEO_D = row["SEO_D"].ToString();
        document.PDF = row["PDF"].ToString();

        // Prefer the explicit publish time; fall back to the record's AddTime.
        if (!string.IsNullOrEmpty(row["publishTime"].ToString().Trim()))
        {
            document.PublishTime = Convert.ToDateTime(row["publishTime"].ToString());
        }
        else
        {
            document.PublishTime = Convert.ToDateTime(row["AddTime"].ToString());
        }

        // Highlight matched words in the title; fall back to the plain name.
        SimpleHTMLFormatter simpleHTMLFormatter =
            new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
        Highlighter titleHighlighter =
            new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
        titleHighlighter.FragmentSize = 100;

        string lightname = titleHighlighter.GetBestFragment(key, document.Name);
        document.TitleHighLighter = string.IsNullOrEmpty(lightname) ? document.Name : lightname;

        result.Add(document);
    }

    return result;
}

/// <summary>
/// Software search list (软件搜索列表): paged full-text search over the
/// SoftwareSearch table, filtered by visit level (the 'JingBiaoLevel' level
/// is always admitted alongside public records).
/// </summary>
/// <param name="pageindex">Page number. NOTE(review): paging math uses
/// (pagesize-1)*pageindex — the reverse of the usual convention; kept as-is,
/// confirm the argument order callers actually pass.</param>
/// <param name="pagesize">Rows per page.</param>
/// <param name="key">Raw search keyword.</param>
/// <param name="isLevel">Comma-separated visit levels the caller may see;
/// null/empty restricts to public and JingBiaoLevel records.</param>
/// <param name="Count">Total hit count reported by the server.</param>
/// <returns>One page of matching software entries with highlighted titles.</returns>
public List<EArticle> SearchSoftWareList(int pageindex, int pagesize, string key, string isLevel, out int Count)
{
    string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
    DataSet ds;

    using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
    {
        conn.Open();
        GetAnalyzerName(conn, "SoftwareSearch");

        // Server-side tokenization of the keyword with the table's analyzer.
        string wordssplitbyspace;
        HubbleCommand matchCmd = new HubbleCommand(conn);
        string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("SoftwareSearch",
            "DocumentName", key, int.MaxValue, out wordssplitbyspace);

        StringBuilder strSql = new StringBuilder();
        string[] levelPara = null;  // split once, reused for the parameter loop below

        if (string.IsNullOrEmpty(isLevel))
        {
            strSql.Append("select between @begin to @end id,documentname,publishtime,addtime,UNIID,IsCheckPartner from SoftwareSearch where (DocumentName Contains @matchString or DocumentName match @matchString) and (VisitLevel ='' or visitlevel='JingBiaoLevel') and release='true' order by score desc");
        }
        else
        {
            levelPara = isLevel.Split(',');
            strSql.Append("select between @begin to @end id,documentname,publishtime,addtime,UNIID,IsCheckPartner from SoftwareSearch where (DocumentName Contains @matchString or DocumentName match @matchString) and (");
            for (int i = 0; i < levelPara.Length; i++)
            {
                strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
            }
            // Drop the trailing "or", then always admit public + JingBiaoLevel.
            strSql.Remove(strSql.Length - 3, 2);
            strSql.Append(" or VisitLevel='' or visitlevel='JingBiaoLevel') and release='true' order by score desc");
        }

        HubbleCommand cmd = new HubbleCommand(strSql.ToString(), conn);
        cmd.Parameters.Add("@begin", (pagesize - 1) * pageindex);
        cmd.Parameters.Add("@end", pagesize * pageindex - 1);
        cmd.Parameters.Add("@matchString", matchString);
        if (levelPara != null)
        {
            for (int i = 0; i < levelPara.Length; i++)
            {
                // Quotes stripped defensively; the value itself is parameterized.
                cmd.Parameters.Add("@VisitLevel" + i, levelPara[i].Replace("'", ""));
            }
        }
        cmd.CacheTimeout = CacheTimeout;

        ds = cmd.Query(CacheTimeout);
    }

    // Hubble reports the total hit count through MinimumCapacity of the
    // first result table (presumably — TODO confirm against Hubble docs).
    Count = ds.Tables[0].MinimumCapacity;

    List<EArticle> result = new List<EArticle>();
    foreach (System.Data.DataRow row in ds.Tables[0].Rows)
    {
        EArticle software = new EArticle();
        software.ID = int.Parse(row["ID"].ToString());
        software.Name = row["DocumentName"].ToString();
        software.UpdateTime = Convert.ToDateTime(row["AddTime"].ToString());
        software.UNIID = row["UNIID"].ToString();
        software.IsCheckPartner = Convert.ToBoolean(row["IsCheckPartner"]);

        // Prefer the explicit publish time; fall back to the record's AddTime.
        if (!string.IsNullOrEmpty(row["publishTime"].ToString().Trim()))
        {
            software.PublishTime = Convert.ToDateTime(row["publishTime"].ToString());
        }
        else
        {
            software.PublishTime = Convert.ToDateTime(row["Addtime"].ToString());
        }

        // Highlight matched words in the title; fall back to the plain name.
        SimpleHTMLFormatter simpleHTMLFormatter =
            new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
        Highlighter titleHighlighter =
            new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
        titleHighlighter.FragmentSize = 100;

        software.TitleHighLighter = titleHighlighter.GetBestFragment(key, software.Name);
        if (string.IsNullOrEmpty(software.TitleHighLighter))
        {
            software.TitleHighLighter = software.Name;
        }

        result.Add(software);
    }

    return result;
}

/// <summary>
/// Whole-site search (全站搜索) over WholeSearch_CN. When <paramref name="typeid"/>
/// is not positive, a GroupBy extension is prefixed so the server also returns
/// per-type hit counts in a second result table (exposed via <paramref name="dtcount"/>).
/// </summary>
/// <param name="pagesize">Rows per page; 0 disables paging and returns all rows.</param>
/// <param name="pageindex">1-based page number (conventional (pageindex-1)*pagesize paging).</param>
/// <param name="key">Raw search keyword.</param>
/// <param name="isLevel">Comma-separated visit levels the caller may see;
/// null/empty restricts to public and JingBiaoLevel records.</param>
/// <param name="typeid">Restrict results to this type when &gt; 0; otherwise group counts by type.</param>
/// <param name="Count">Total hit count reported by the server.</param>
/// <param name="dtcount">Per-TypeID count table from the GroupBy extension, or null when absent.</param>
/// <returns>One page of matching records with highlighted title and abstract.</returns>
public List<ESearch> SearchAllList(int pagesize, int pageindex, string key, string isLevel, int typeid, out int Count, out DataTable dtcount)
{
    string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
    DataSet ds;

    using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
    {
        conn.Open();
        GetAnalyzerName(conn, "WholeSearch_CN");

        // Server-side tokenization of the keyword with the table's analyzer.
        string wordssplitbyspace;
        HubbleCommand matchCmd = new HubbleCommand(conn);
        string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("WholeSearch_CN",
            "Name", key, int.MaxValue, out wordssplitbyspace);

        StringBuilder strSql = new StringBuilder();
        string[] levelPara = null;  // split once, reused for the parameter loop below

        strSql.Append(" select ");
        if (pagesize != 0)
        {
            strSql.Append(" between @begin to @end ");
        }
        // BUGFIX: the column list and FROM clause used to live inside the
        // pagesize != 0 branch, so the unpaged case produced invalid SQL
        // ("select  WholeSearch_CN where ..."). They are now always appended,
        // mirroring EN_SearchAllList.
        strSql.Append(" ID,Name,TypeID,SEO_D,TypeName,strURL,addTime from ");
        strSql.Append(" WholeSearch_CN where ( Name Contains @matchString or Name Match @matchString or SEO_D Contains @matchString or SEO_D Match @matchString) ");

        if (string.IsNullOrEmpty(isLevel))
        {
            strSql.Append(" and (VisitLevel ='' or visitlevel='JingBiaoLevel')");
        }
        else
        {
            levelPara = isLevel.Split(',');
            strSql.Append(" and (");
            for (int i = 0; i < levelPara.Length; i++)
            {
                strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
            }
            // Drop the trailing "or", then always admit public + JingBiaoLevel.
            strSql.Remove(strSql.Length - 3, 2);
            strSql.Append(" or VisitLevel='' or visitlevel='JingBiaoLevel') ");
        }

        if (typeid > 0)
        {
            strSql.Append(" and typeid=@typeid");
        }
        else
        {
            // No type filter: prepend the GroupBy extension so the server emits
            // per-TypeID counts as an extra result table.
            strSql.Insert(0, " [GroupBy('Count', 'ID', 'TypeID', 10)] ");
        }
        strSql.Append(" order by score desc, lorder");

        HubbleCommand cmd = new HubbleCommand(strSql.ToString(), conn);
        cmd.Parameters.Add("@begin", (pageindex - 1) * pagesize);
        cmd.Parameters.Add("@end", pageindex * pagesize - 1);
        cmd.Parameters.Add("@matchString", matchString);
        cmd.Parameters.Add("@typeid", typeid);  // added even when unused in SQL, as before
        if (levelPara != null)
        {
            for (int i = 0; i < levelPara.Length; i++)
            {
                // Quotes stripped defensively; the value itself is parameterized.
                cmd.Parameters.Add("@VisitLevel" + i, levelPara[i].Replace("'", ""));
            }
        }
        cmd.CacheTimeout = CacheTimeout;

        ds = cmd.Query(CacheTimeout);
    }

    // Hubble reports the total hit count through MinimumCapacity of the
    // first result table (presumably — TODO confirm against Hubble docs).
    Count = ds.Tables[0].MinimumCapacity;

    // Second table (when present) carries the GroupBy per-type counts.
    if (ds.Tables.Count > 1)
    {
        dtcount = ds.Tables[1];
    }
    else
    {
        dtcount = null;
    }

    List<ESearch> result = new List<ESearch>();
    foreach (System.Data.DataRow row in ds.Tables[0].Rows)
    {
        ESearch search = new ESearch();
        search.ID = int.Parse(row["ID"].ToString());
        search.Name = Strings.NoHTML(row["Name"].ToString());
        search.SEO_D = row["SEO_D"].ToString();
        search.strURL = row["strURL"].ToString();

        // TypeID may be blank for some records; skip both fields in that case.
        if (row["TypeID"].ToString() != "")
        {
            search.TypeID = int.Parse(row["TypeID"].ToString());
            search.TypeName = row["TypeName"].ToString();
        }

        // Missing add time falls back to a far-future sentinel date.
        if (!string.IsNullOrEmpty(row["addTime"].ToString().Trim()))
        {
            search.addTime = Convert.ToDateTime(row["addTime"].ToString());
        }
        else
        {
            search.addTime = Convert.ToDateTime("9999-12-31");
        }

        // Highlight matched words in both abstract (SEO_D) and title; fall back
        // to the raw text when nothing matched.
        SimpleHTMLFormatter simpleHTMLFormatter =
            new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
        Highlighter titleHighlighter =
            new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
        Highlighter contentHighlighter =
            new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
        titleHighlighter.FragmentSize = 100;
        contentHighlighter.FragmentSize = 100;

        search.Abstract = contentHighlighter.GetBestFragment(key, search.SEO_D);
        if (string.IsNullOrEmpty(search.Abstract))
        {
            search.Abstract = search.SEO_D;
        }
        search.TitleHighLighter = titleHighlighter.GetBestFragment(key, search.Name);
        if (string.IsNullOrEmpty(search.TitleHighLighter))
        {
            search.TitleHighLighter = search.Name;
        }

        result.Add(search);
    }

    return result;
}

/// <summary>
/// English document search list (英文文档搜索列表): paged full-text search over
/// the DocumentSearch_EN table, filtered by visit level.
/// </summary>
/// <param name="pageindex">Page number. NOTE(review): paging math uses
/// (pagesize-1)*pageindex — the reverse of the usual convention; kept as-is,
/// confirm the argument order callers actually pass.</param>
/// <param name="pagesize">Rows per page.</param>
/// <param name="key">Raw search keyword.</param>
/// <param name="isLevel">Comma-separated visit levels the caller may see;
/// null/empty restricts to public (VisitLevel='') documents. When levels are
/// given, SEO_K is also searched (matches the original branch-specific SQL).</param>
/// <param name="Count">Total hit count reported by the server.</param>
/// <returns>One page of matching documents with highlighted titles.</returns>
public List<EArticle> EN_SearchDocList(int pageindex, int pagesize, string key, string isLevel, out int Count)
{
    string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
    DataSet ds;

    using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
    {
        conn.Open();
        GetAnalyzerName(conn, "DocumentSearch_EN");

        // Server-side tokenization of the keyword with the table's analyzer.
        string wordssplitbyspace;
        HubbleCommand matchCmd = new HubbleCommand(conn);
        string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("DocumentSearch_EN",
            "documentname", key, int.MaxValue, out wordssplitbyspace);

        StringBuilder strSql = new StringBuilder();
        string[] levelPara = null;  // split once, reused for the parameter loop below

        if (string.IsNullOrEmpty(isLevel))
        {
            strSql.Append("select between @begin to @end ID,UNIID,DocumentName,PDF,SEO_D,publishTime,addTime from DocumentSearch_EN where ( documentname Contains @matchString or documentname match @matchString ) and VisitLevel ='' and release='true' order by score desc");
        }
        else
        {
            levelPara = isLevel.Split(',');
            strSql.Append("select between @begin to @end ID,UNIID,DocumentName,PDF,SEO_D,publishTime,addTime from DocumentSearch_EN where ( documentname Contains @matchString or documentname match @matchString or SEO_K Contains @matchString or SEO_K match @matchString ) and (");
            for (int i = 0; i < levelPara.Length; i++)
            {
                strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
            }
            // Drop the trailing "or", then always admit the public level.
            strSql.Remove(strSql.Length - 3, 2);
            strSql.Append(" or VisitLevel='') and release='true' order by score desc");
        }

        HubbleCommand cmd = new HubbleCommand(strSql.ToString(), conn);
        cmd.Parameters.Add("@begin", (pagesize - 1) * pageindex);
        cmd.Parameters.Add("@end", pagesize * pageindex - 1);
        cmd.Parameters.Add("@matchString", matchString);
        if (levelPara != null)
        {
            for (int i = 0; i < levelPara.Length; i++)
            {
                // Quotes stripped defensively; the value itself is parameterized.
                cmd.Parameters.Add("@VisitLevel" + i, levelPara[i].Replace("'", ""));
            }
        }
        cmd.CacheTimeout = CacheTimeout;

        ds = cmd.Query(CacheTimeout);
    }

    // Hubble reports the total hit count through MinimumCapacity of the
    // first result table (presumably — TODO confirm against Hubble docs).
    Count = ds.Tables[0].MinimumCapacity;

    List<EArticle> result = new List<EArticle>();
    foreach (System.Data.DataRow row in ds.Tables[0].Rows)
    {
        EArticle document = new EArticle();
        document.ID = int.Parse(row["ID"].ToString());
        document.UNIID = row["UNIID"].ToString();
        document.Name = row["DocumentName"].ToString();
        document.SEO_D = row["SEO_D"].ToString();
        document.PDF = row["PDF"].ToString();

        // Prefer the explicit publish time; fall back to the record's addTime.
        if (!string.IsNullOrEmpty(row["publishTime"].ToString().Trim()))
        {
            document.PublishTime = Convert.ToDateTime(row["publishTime"].ToString());
        }
        else
        {
            document.PublishTime = Convert.ToDateTime(row["addTime"].ToString());
        }

        // Highlight matched words in the title; fall back to the plain name.
        SimpleHTMLFormatter simpleHTMLFormatter =
            new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
        Highlighter titleHighlighter =
            new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
        titleHighlighter.FragmentSize = 100;

        string lightname = titleHighlighter.GetBestFragment(key, document.Name);
        document.TitleHighLighter = string.IsNullOrEmpty(lightname) ? document.Name : lightname;

        result.Add(document);
    }

    return result;
}

/// <summary>
/// English whole-site search (英文全站搜索) over WholeSearch_EN, optionally
/// restricted to a single type and/or a set of visit levels.
/// </summary>
/// <param name="pagesize">Rows per page; 0 disables paging and returns all rows.</param>
/// <param name="pageindex">1-based page number (conventional (pageindex-1)*pagesize paging).</param>
/// <param name="key">Raw search keyword.</param>
/// <param name="isLevel">Comma-separated visit levels the caller may see;
/// null/empty restricts to public (VisitLevel='') records.</param>
/// <param name="typeid">Restrict results to this type when &gt; 0.</param>
/// <param name="Count">Total hit count reported by the server.</param>
/// <returns>One page of matching records with highlighted title and abstract.</returns>
public List<ESearch> EN_SearchAllList(int pagesize, int pageindex, string key, string isLevel, int typeid, out int Count)
{
    string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
    DataSet ds;

    using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
    {
        conn.Open();
        GetAnalyzerName(conn, "WholeSearch_EN");

        // Server-side tokenization of the keyword with the table's analyzer.
        string wordssplitbyspace;
        HubbleCommand matchCmd = new HubbleCommand(conn);
        string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("WholeSearch_EN",
            "Name", key, int.MaxValue, out wordssplitbyspace);

        // The select prefix, typeid filter, and order-by used to be duplicated
        // in both isLevel branches; they are hoisted out here (same SQL output).
        StringBuilder strSql = new StringBuilder();
        string[] levelPara = null;

        if (pagesize == 0)
        {
            strSql.Append("select ID,Name,TypeID,SEO_D,TypeName,addTime from ");
        }
        else
        {
            strSql.Append("select between @begin to @end ID,Name,TypeID,SEO_D,TypeName,addTime from ");
        }

        if (string.IsNullOrEmpty(isLevel))
        {
            strSql.Append(" WholeSearch_EN where ( Name Contains @matchString or Name Match @matchString or SEO_D Contains @matchString or SEO_D Match @matchString) and VisitLevel =''");
        }
        else
        {
            levelPara = isLevel.Split(',');
            strSql.Append(" WholeSearch_EN where ( Name Contains @matchString or Name Match @matchString or SEO_D Contains @matchString or SEO_D Match @matchString) and (");
            for (int i = 0; i < levelPara.Length; i++)
            {
                strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
            }
            // Drop the trailing "or", then always admit the public level.
            strSql.Remove(strSql.Length - 3, 2);
            strSql.Append(" or VisitLevel='') ");
        }

        if (typeid > 0)
        {
            strSql.Append(" and typeid=@typeid");
        }
        strSql.Append(" order by score desc, lorder");

        HubbleCommand cmd = new HubbleCommand(strSql.ToString(), conn);
        cmd.Parameters.Add("@begin", (pageindex - 1) * pagesize);
        cmd.Parameters.Add("@end", pageindex * pagesize - 1);
        cmd.Parameters.Add("@matchString", matchString);
        cmd.Parameters.Add("@typeid", typeid);  // added even when unused in SQL, as before
        if (levelPara != null)
        {
            for (int i = 0; i < levelPara.Length; i++)
            {
                // Quotes stripped defensively; the value itself is parameterized.
                cmd.Parameters.Add("@VisitLevel" + i, levelPara[i].Replace("'", ""));
            }
        }
        cmd.CacheTimeout = CacheTimeout;

        ds = cmd.Query(CacheTimeout);
    }

    // Hubble reports the total hit count through MinimumCapacity of the
    // first result table (presumably — TODO confirm against Hubble docs).
    Count = ds.Tables[0].MinimumCapacity;

    List<ESearch> result = new List<ESearch>();
    foreach (System.Data.DataRow row in ds.Tables[0].Rows)
    {
        ESearch search = new ESearch();
        search.ID = int.Parse(row["ID"].ToString());
        search.Name = Strings.NoHTML(row["Name"].ToString());
        search.SEO_D = row["SEO_D"].ToString();

        // BUGFIX: guard the parse like SearchAllList does — a blank TypeID used
        // to throw FormatException and abort the whole result set.
        if (row["TypeID"].ToString() != "")
        {
            search.TypeID = int.Parse(row["TypeID"].ToString());
        }
        search.TypeName = row["TypeName"].ToString();

        // Missing add time falls back to a far-future sentinel date.
        if (!string.IsNullOrEmpty(row["addTime"].ToString().Trim()))
        {
            search.addTime = Convert.ToDateTime(row["addTime"].ToString());
        }
        else
        {
            search.addTime = Convert.ToDateTime("9999-12-31");
        }

        // Highlight matched words in both abstract (SEO_D) and title; fall back
        // to the raw text when nothing matched.
        SimpleHTMLFormatter simpleHTMLFormatter =
            new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
        Highlighter titleHighlighter =
            new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
        Highlighter contentHighlighter =
            new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
        titleHighlighter.FragmentSize = 100;
        contentHighlighter.FragmentSize = 100;

        search.Abstract = contentHighlighter.GetBestFragment(key, search.SEO_D);
        if (string.IsNullOrEmpty(search.Abstract))
        {
            search.Abstract = search.SEO_D;
        }
        search.TitleHighLighter = titleHighlighter.GetBestFragment(key, search.Name);
        if (string.IsNullOrEmpty(search.TitleHighLighter))
        {
            search.TitleHighLighter = search.Name;
        }

        result.Add(search);
    }

    return result;
}

/// <summary>

/// 中文文档搜索列表

/// </summary>

/// <param name="pageindex"></param>

/// <param name="pagesize"></param>

/// <param name="key"></param>

/// <param name="Count"></param>

/// <returns></returns>

public List<EArticle> SearchDocListNew(int pageindex, int pagesize, string key, string isLevel, out int Count)

{

string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();

DataSet ds = new DataSet();

//string sql = "select * from News where Title match '" + key + "'";

//ds = SQLHelper.ExecuteDataset(strConn, CommandType.Text, sql, null);

System.Diagnostics.Stopwatch sw = new System.Diagnostics.Stopwatch();

sw.Start();

using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))

{

conn.Open();

GetAnalyzerName(conn, "DocumentSearch");

string wordssplitbyspace;

HubbleCommand matchCmd = new HubbleCommand(conn);

string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("DocumentSearch",

"Documentname", (key.Split('_')[0]).ToString(), int.MaxValue, out wordssplitbyspace);

HubbleDataAdapter adapter = new HubbleDataAdapter();

StringBuilder strSql = new StringBuilder();

string[] levelPara;

//if (key.Split('_')[1] == "1")

//{

// if (string.IsNullOrEmpty(isLevel))

// {

// // strSql.Append("select between @begin to @end * from V_DocumentSearch where ( documentname^100 Contains @matchString or Content^50 Contains @matchString or documentname^100 match @matchString or Content^50 match @matchString ) and VisitLevel ='' and release='true' order by score desc");

// strSql.Append("select between @begin to @end ID,UNIID,DocumentName,SEO_D,PDF,htmlURL,gid_collection,publishTime,addTime from DocumentSearch where ( documentname Contains @matchString or documentname match @matchString ) and VisitLevel ='' and release='true' order by score desc");

// }

// else

// {

// levelPara = isLevel.Split(',');

// //strSql.Append("select between @begin to @end * from V_DocumentSearch where ( documentname^100 Contains @matchString or Content^50 Contains @matchString or documentname^100 match @matchString or Content^50 match @matchString ) and (");

// strSql.Append("select between @begin to @end ID,UNIID,DocumentName,SEO_D,PDF,htmlURL,gid_collection,publishTime,addTime from DocumentSearch where ( documentname Contains @matchString or documentname match @matchString ) and (");

// for (int i = 0; i < levelPara.Length; i++)

// {

// strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");

// }

// strSql.Remove(strSql.Length - 3, 2);

// strSql.Append(" or VisitLevel='') and release='true' order by score desc");

// }

//}

//else if (key.Split('_')[1] == "2")

//{

// if (string.IsNullOrEmpty(isLevel))

// {

// //strSql.Append("select between @begin to @end * from V_DocumentSearch where ( documentname^100 Contains @matchString or Content^50 Contains @matchString or documentname^100 match @matchString or Content^50 match @matchString ) and VisitLevel ='' and release='true' order by publishTime desc");

// strSql.Append("select between @begin to @end ID,UNIID,DocumentName,SEO_D,PDF,htmlURL,gid_collection,publishTime,addTime from DocumentSearch where ( documentname Contains @matchString or documentname match @matchString ) and VisitLevel ='' and release='true' order by publishTime desc");

// }

// else

// {

// levelPara = isLevel.Split(',');

// //strSql.Append("select between @begin to @end * from V_DocumentSearch where ( documentname^100 Contains @matchString or Content^50 Contains @matchString or documentname^100 match @matchString or Content^50 match @matchString ) and (");

// strSql.Append("select between @begin to @end ID,UNIID,DocumentName,SEO_D,PDF,htmlURL,gid_collection,publishTime,addTime from DocumentSearch where ( documentname Contains @matchString or documentname match @matchString ) and (");

// for (int i = 0; i < levelPara.Length; i++)

// {

// strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");

// }

// strSql.Remove(strSql.Length - 3, 2);

// strSql.Append(" or VisitLevel='') and release='true' order by publishTime desc");

// }

//}

//else

//{

if (string.IsNullOrEmpty(isLevel))

{

//strSql.Append("select between @begin to @end * from V_DocumentSearch where ( documentname^100 Contains @matchString or Content^50 Contains @matchString or documentname^100 match @matchString or Content^50 match @matchString ) and VisitLevel ='' and release='true' order by score desc, publishTime desc");

strSql.Append("select between @begin to @end ID,UNIID,DocumentName,SEO_D,PDF,htmlURL,gid_collection,publishTime,addTime from DocumentSearch where ( documentname Contains @matchString or documentname match @matchString ) and VisitLevel ='' and release='true' order by score desc, publishTime desc");

}

else

{

levelPara = isLevel.Split(',');

//strSql.Append("select between @begin to @end * from V_DocumentSearch where ( documentname^100 Contains @matchString or Content^50 Contains @matchString or documentname^100 match @matchString or Content^50 match @matchString ) and (");

strSql.Append("select between @begin to @end ID,UNIID,DocumentName,SEO_D,PDF,htmlURL,gid_collection,publishTime,addTime from DocumentSearch where ( documentname Contains @matchString or documentname match @matchString ) and (");

for (int i = 0; i < levelPara.Length; i++)

{

strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");

}

strSql.Remove(strSql.Length - 3, 2);

strSql.Append(" or VisitLevel='') and release='true' order by score desc, publishTime desc ");

}

//}

adapter.SelectCommand = new HubbleCommand(strSql.ToString(), conn);

adapter.SelectCommand.Parameters.Add("@begin", (pagesize - 1) * pageindex);

adapter.SelectCommand.Parameters.Add("@end", pagesize * pageindex - 1);

adapter.SelectCommand.Parameters.Add("@matchString", matchString);

if (!string.IsNullOrEmpty(isLevel))

{

for (int i = 0; i < isLevel.Split(',').Length; i++)

{

adapter.SelectCommand.Parameters.Add("@VisitLevel" + i, isLevel.Split(',')[i].Replace("'", ""));

}

}

adapter.SelectCommand.CacheTimeout = CacheTimeout;

ds = new System.Data.DataSet();

//adapter.Fill(ds);

HubbleCommand cmd = adapter.SelectCommand;

ds = cmd.Query(CacheTimeout);

//titleWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Title", docids, int.MaxValue);

//contentWordsPositions = cmd.GetWordsPositions(wordssplitbyspace, "News", "Content", docids, int.MaxValue);

}

Count = ds.Tables[0].MinimumCapacity;

List<EArticle> result = new List<EArticle>();

foreach (System.Data.DataRow row in ds.Tables[0].Rows)

{

EArticle document = new EArticle();

document.ID = int.Parse(row["ID"].ToString());

document.UNIID = row["UNIID"].ToString();

document.Name = row["DocumentName"].ToString();

document.SEO_D = row["SEO_D"].ToString();

document.PDF = row["PDF"].ToString();

document.htmlURL = row["htmlURL"].ToString();

document.gid_collection = row["gid_collection"].ToString();

document.TitleHighLighter = row["DocumentName"].ToString();

if (!string.IsNullOrEmpty(row["publishTime"].ToString().Trim()))

{

document.PublishTime = Convert.ToDateTime(row["publishTime"].ToString());

}

else

{

document.PublishTime = Convert.ToDateTime(row["AddTime"].ToString());

}

SimpleHTMLFormatter simpleHTMLFormatter =

new SimpleHTMLFormatter("<font color=\"red\">", "</font>");

Highlighter titleHighlighter;

Highlighter contentHighlighter;

titleHighlighter =

new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());

contentHighlighter =

new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());

titleHighlighter.FragmentSize = 100;

contentHighlighter.FragmentSize = 100;

string lightname = titleHighlighter.GetBestFragment((key.Split('_')[0]).ToString(), document.Name);

if (string.IsNullOrEmpty(lightname))

{

document.TitleHighLighter = document.Name;

}

else

{

document.TitleHighLighter = lightname;

}

result.Add(document);

}

sw.Stop();

return result;

}

}

}

HubbleDotNet 使用类的更多相关文章

  1. Java类的继承与多态特性-入门笔记

    相信对于继承和多态的概念性我就不再怎么解释啦!不管你是.Net还是Java,面向对象编程都是必不可少的一堂课~~.Net如此,Java亦有同样的思想成分包含其中. 继承,多态,封装是Java面向对象的3大特 ...

  2. HubbleDotNet 开源全文搜索数据库项目--为数据库现有表或视图建立全文索引(三) 多表关联全文索引模式

    关系型数据库中,多表关联是很常见的事情,HubbleDotNet 可以对部分情况的多表关联形式建立关联的全文索引,这样用户就不需要专门建一个大表 来解决多表关联时的全文索引问题. 下面以 为数据库现有 ...

  3. C++ 可配置的类工厂

    项目中常用到工厂模式,工厂模式可以把创建对象的具体细节封装到Create函数中,减少重复代码,增强可读和可维护性.传统的工厂实现如下: class Widget { public: virtual i ...

  4. Android请求网络共通类——Hi_博客 Android App 开发笔记

    今天 ,来分享一下 ,一个博客App的开发过程,以前也没开发过这种类型App 的经验,求大神们轻点喷. 首先我们要创建一个Andriod 项目 因为要从网络请求数据所以我们先来一个请求网络的共通类. ...

  5. ASP.NET MVC with Entity Framework and CSS一书翻译系列文章之第二章:利用模型类创建视图、控制器和数据库

    在这一章中,我们将直接进入项目,并且为产品和分类添加一些基本的模型类.我们将在Entity Framework的代码优先模式下,利用这些模型类创建一个数据库.我们还将学习如何在代码中创建数据库上下文类 ...

  6. ASP.NET Core 折腾笔记二:自己写个完整的Cache缓存类来支持.NET Core

    背景: 1:.NET Core 已经没System.Web,也木有了HttpRuntime.Cache,因此,该空间下Cache也木有了. 2:.NET Core 有新的Memory Cache提供, ...

  7. .NET Core中间件的注册和管道的构建(2)---- 用UseMiddleware扩展方法注册中间件类

    .NET Core中间件的注册和管道的构建(2)---- 用UseMiddleware扩展方法注册中间件类 0x00 为什么要引入扩展方法 有的中间件功能比较简单,有的则比较复杂,并且依赖其它组件.除 ...

  8. Java基础Map接口+Collections工具类

    1.Map中我们主要讲两个接口 HashMap  与   LinkedHashMap (1)其中LinkedHashMap是有序的  怎么存怎么取出来 我们讲一下Map的增删改查功能: /* * Ma ...

  9. PHP-解析验证码类--学习笔记

    1.开始 在网上看到使用PHP写的ValidateCode生成验证码类,感觉不错,特拿来分析学习一下. 2.类图 3.验证码类部分代码 3.1  定义变量 //随机因子 private $char ...

随机推荐

  1. 「NOIP2006」「LuoguP1064」 金明的预算方案(分组背包

    题目描述 金明今天很开心,家里购置的新房就要领钥匙了,新房里有一间金明自己专用的很宽敞的房间.更让他高兴的是,妈妈昨天对他说:“你的房间需要购买哪些物品,怎么布置,你说了算,只要不超过N元钱就行” ...

  2. C# 调用SQL的存储过程的接口及实现

    1. 接口为ExecuteStoredProcedure(string storedProcedureName, params ObjectParameter[] parameters) 2. 参数为 ...

  3. Linux的学习思路

    自学嵌入式确实不大现实(当然也不是说没有这个可能),毕竟嵌入式难度也是比较大的. 嵌入式的应用主要是几个方向, 一是系统开发:侧重开发环境搭建.内核原理.交叉编译等: 二是嵌入式Linux应用开发:侧 ...

  4. hadoop之一:概念和整体架构

    什么是hadoop? Apache Hadoop是一款支持数据密集型分布式应用并以Apache 2.0许可协议发布的开源软件框架.它支持在商品硬件构建的大型集群上运行的应用程序.Hadoop是根据Go ...

  5. warning: conflicting types for built-in function 'puts'

    warning: conflicting types for built-in function 'puts' [编译器版本] arm-linux-gcc 3.4.1 [问题描述] 在做嵌入式底层开发 ...

  6. Openstack web 添加和删除按钮

    注:当前已经时候用smaba将openstack环境的源码共享到windows系统上,并使用pycharm进行代码编辑和修改(参见openstack开发环境搭建).如下图:

  7. sqlServer对内存的管理

    简介 理解SQL Server对于内存的管理是对于SQL Server问题处理和性能调优的基本,本篇文章讲述SQL Server对于内存管理的内存原理. 二级存储(secondary storage) ...

  8. Python中生成随机数

    目录 1. random模块 1.1 设置随机种子 1.2 random模块中的方法 1.3 使用:生成整形随机数 1.3 使用:生成序列随机数 1.4 使用:生成随机实值分布 2. numpy.ra ...

  9. 51nod1127(尺取法)

    题目链接:https://www.51nod.com/onlineJudge/questionCode.html#!problemId=1127 题意:中文题诶- 思路:尺取法 维护一个队列,若当前队 ...

  10. python的编码问题整理

    一.编码和解码 1.编码(encode):将人类可以识别的语言(英文.中文等)转化成机器语言(01串)的过程,用于存储. 2.解码(decode):将机器语言转化成人类可识别的语言的过程,用于显示. ...