Friday, May 8, 2009
Branch Prediction Algorithm
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
/******************************************************************************
Branch Prediction Simulator
(c) M. Lutfi, 2009
******************************************************************************/
#define ROW_MAJOR_ADDR(A, r, c, n) ((A) + (r)*(n) + (c))
typedef enum {
not_taken = 0,
taken
} branch_t;
typedef enum {
strongly_not_taken = 0,
weakly_not_taken,
weakly_taken,
strongly_taken
} branch_state_t;
typedef struct {
unsigned int correctPredictionCount;
branch_state_t state;
unsigned int n_iter;
} lbp_t;
enum {
BRANCH_A = 0,
BRANCH_B,
BRANCH_C,
BRANCH_D,
N_OF_BRANCHES
};
#define NBITS_OF_GBH 3 // number of bits in Global History register
lbp_t lbp_table[1<<NBITS_OF_GBH][N_OF_BRANCHES]; // one row per possible GBH pattern (2^NBITS_OF_GBH rows)
unsigned char gbh=0; // global branch history register (3-bit only)
branch_t last_outcome = not_taken;
char* get_branch_state_str(branch_t b)
{
if (b == not_taken)
return "not_taken";
else
return "taken";
}
void update_gbh(void)
{
#if 0 /* history update and debug trace disabled: gbh stays 0, so only row 0 of lbp_table is used */
gbh = (gbh<<1) | last_outcome;
gbh &= ((1<<NBITS_OF_GBH)-1);
printf("last_outcome = %s\n", get_branch_state_str(last_outcome));
printf("gbh = %0X\n", gbh);
#endif
}
/* Local Branch Prediction FSM */
void lbp_update(lbp_t *lbp, branch_t outcome)
{
switch (lbp->state) {
case strongly_not_taken:
if (outcome == taken) {
lbp->state = weakly_not_taken;
} else {
lbp->correctPredictionCount++;
}
break;
case weakly_not_taken:
if (outcome == taken) {
lbp->state = weakly_taken;
} else {
lbp->correctPredictionCount++; // predicted not taken, outcome not taken
lbp->state = strongly_not_taken;
}
break;
case weakly_taken:
if (outcome == taken) {
lbp->correctPredictionCount++; // predicted taken, outcome taken
lbp->state = strongly_taken;
} else {
lbp->state = weakly_not_taken;
}
break;
case strongly_taken:
if (outcome == taken) {
lbp->correctPredictionCount++;
} else {
// the outcome is NT
lbp->state = weakly_taken;
}
break;
}
lbp->n_iter++;
}
int fun(double *p, int n)
{
int r = 0;
int c = 0;
double *pA;
while (r <= n-1) {
last_outcome = not_taken;
update_gbh();
lbp_update(&lbp_table[gbh][BRANCH_A], not_taken);
c = 0; // restart the column scan for every row; without this only row 0 is visited
while (c <= n-1) {
last_outcome = not_taken;
update_gbh();
pA = ROW_MAJOR_ADDR(p, r, c, n);
if (r < c) {
lbp_update(&lbp_table[gbh][BRANCH_C], not_taken);
*pA = 2* (*pA) + 1;
last_outcome = not_taken;
} else {
lbp_update(&lbp_table[gbh][BRANCH_C], taken);
last_outcome = taken;
}
if (r > c) {
lbp_update(&lbp_table[gbh][BRANCH_D], not_taken);
*pA = 2* (*pA) - 1;
last_outcome = not_taken;
} else {
lbp_update(&lbp_table[gbh][BRANCH_D], taken);
last_outcome = taken;
}
++c;
}
lbp_update(&lbp_table[gbh][BRANCH_B], taken);
last_outcome = taken;
update_gbh();
++r;
}
lbp_update(&lbp_table[gbh][BRANCH_A], taken);
last_outcome = taken;
update_gbh();
return 0;
}
void print_A(double *p, int n)
{
int i,j;
double *pA;
for (i=0; i<n; i++) {
for(j=0; j<n; j++) {
pA = ROW_MAJOR_ADDR(p, i, j, n);
if (!pA) return;
printf("%d: &A[%d][%d] = %0X,\t", __LINE__, i, j, pA);
printf("%d: A[%d][%d] = %lf\n", __LINE__, i, j, *pA);
}
}
}
void init_gbh(void)
{
int i,j;
lbp_t *lbpP;
gbh = 0;
for(i=0; i<(1<<NBITS_OF_GBH); i++) { // one table row per possible GBH value
lbpP = lbp_table[i];
for(j=0; j<N_OF_BRANCHES; j++) {
lbpP[j].correctPredictionCount = 0;
lbpP[j].state = weakly_not_taken;
lbpP[j].n_iter = 0;
}
}
}
int main(void)
{
#define N 50
double A[N][N];
int i,j;
init_gbh();
printf("sizeof(double) = %d\n", sizeof(double));
printf("%d: &A = %0X\n", __LINE__, &A[0][0]);
for(i=0; i<N; i++) {
for(j=0; j<N; j++) {
A[i][j] = ROW_MAJOR_ADDR(1, i, j, N);
}
}
fun(&A[0][0], N);
printf("gbh = %0X\n", gbh);
for(i=0; i<N_OF_BRANCHES; i++) {
printf("lbp[%d] iteration = %d\n", i, lbp_table[gbh][i].n_iter);
printf("lbp[%d] correct prediction count = %d\n",
i, lbp_table[gbh][i].correctPredictionCount);
printf("Correct Prediction Rate = %4.2lf%\n\n",
((double)(lbp_table[gbh][i].correctPredictionCount)/(double)lbp_table[gbh][i].n_iter) *100.0);
}
}
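To try the listing, one possible way to build and run it (assuming it is saved as bpsim.c; the file name and the flags are an assumption, not part of the original post):

gcc -std=c99 -Wall -o bpsim bpsim.c -lm
./bpsim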
Monday, May 4, 2009
ARM EABI Environment
Here are the links for some useful packages:
http://armel.applieddata.net/developers/linux/eabi/armel-root-fs.tar.bz2
http://armel.applieddata.net/developers/linux/eabi/armel-root-fs.tar.bz2.md5
http://wiki.debian.org/ArmEabiPort
CodeSourcery GNU: http://www.codesourcery.com/sgpp/lite/arm/portal/release642
Monday, April 27, 2009
Sci-Tech Friendly President
Obama also said that he wants to make solar cells as cheap as paint, along with self-powered buildings (smart buildings?) and other interesting sci-tech research. We will see breakthroughs from US national labs again in the coming years, after they deteriorated under the wrong policies of Bush, an ex-president who was war-friendly rather than science-friendly.
Shall we start buying technology stocks once again? I am thinking that energy-related technologies will boom: smart building systems that can conserve more energy, including home automation that makes energy consumption more efficient; faster routers (we live in a connected world, don't we?); more advanced robotics; and so on. Many new hi-tech jobs will be available.
I think 99% of scientists and engineers should love this president.
Bravo to Obama!
Thursday, April 16, 2009
Mobile Platforms
- Nokia’s Symbian OS-based S60 platform has something for everyone — C, C++, Java, Python, WRT widgets, and Flash — but the APIs require some getting used to. SymbianC++ and Open C/C++ (a C programming interface with runtime Posix libraries) programs are packaged as metadata files that must be digitally signed for security checks or the application won’t execute. IT can therefore use security certificates to monitor and control in-house mobile applications.
- iPhone uses Objective-C — challenging even for experienced C, C++, and C# programmers. Developers coming from other languages face an even steeper learning curve. The Cocoa Touch programming interface and proprietary XCode integrated development environment (IDE) provide a powerful environment that includes a WYSIWYG interface builder. For Web-based apps, the SDK includes the HTML/JavaScript-based Dashcode framework. Everything in the iPhone runs at root level — and every process executing with root privileges can be a security threat. Additionally, the iPhone permits only one third-party app to run at a time. iPhone apps also must be digitally signed before they can execute.
- Android applications are written in Java, but not Java ME. Instead, the Android SDK is a combination of standard Java SE and Java ME methods and classes, as well as nonstandard ones. This means that there’s a learning curve, even for seasoned Java developers. The Android Development Tools plug-in lets developers use Eclipse to write and debug applications. Again, Android apps must be signed or they won’t run. The SDK does provide a developer key, but a private key is required for public distribution.
- BlackBerry applications can be developed several ways: a Java-based IDE that provides access to RIM APIs and an Eclipse plug-in; a rapid application development approach that focuses on Web services using Visual Studio or Eclipse plug-ins and supports any .NET or Java language choice; or a Web-based app approach referred to as Browser Development, which lets developers create apps using existing BlackBerry browser software. The downside to writing apps using BlackBerry API extensions is that it ties the application to a particular device. Still, that’s no different than using the Android’s unique Java classes.
- Windows Mobile uses the .NET Compact Framework, which makes development relatively straightforward for developers familiar with .NET languages such as C#, Visual Basic .NET, and (for native code) Visual C++. Because the .NET Compact Framework is a subset of the .NET Framework, components from .NET-based desktop clients, application servers, and Web servers are available. The upside is that companies that have standardized on Microsoft platforms and developer tools can jump into mobile development. The downside is that the apps run on a single platform — Windows Mobile OS.
Monday, April 13, 2009
Sniper location system
By David F. Salisbury
Published: March 19, 2009
Imagine a platoon of soldiers fighting in a hazardous urban environment who carry personal digital assistants that can display the location of enemy shooters in three dimensions and accurately identify the caliber and type of weapons they are firing.
Engineers at Vanderbilt University's Institute for Software Integrated Systems (ISIS) have developed a system that can give soldiers just such an edge by turning their combat helmets into "smart nodes” in a wireless sensor network.
ISIS developed this novel technology with the support of the Defense Advanced Research Projects Agency, and the university has patented the system's key elements.
Like several other shooter location systems developed in recent years, the ISIS system relies on the sound waves produced when a high-powered rifle is fired. These acoustic signals have distinctive characteristics that allow the systems to pick them out from other loud noises and track them back to their source. Current systems, however, rely on centralized or stand-alone sensor arrays. This limits their accuracy and restricts them to identifying shooters at line-of-sight locations.
By contrast, the ISIS system combines information from a number of nodes to triangulate on shooter positions and improve the accuracy of its location identification process. It also uses a patented technique to filter out the echoes that can throw off other acoustic detection systems, explains Akos Ledeczi, the senior research scientist at ISIS who heads up the development effort.
"When DARPA gave us the assignment of creating a shooter location system using nodes with very limited capabilities, they didn't think we could solve the technical problems,” Ledeczi admits. "At first, I didn't think we could do it either, but we figured out how to make it work!”
Retired U.S. Army Lieutenant Colonel Albert Sciarretta, who assesses new military technologies in urban environments for DARPA, is one of the experts who is impressed by the ISIS system: "Its strong points are that it isn't limited to locating shots fired in direct line-of-sight, it can pick up multiple shooters at the same time, and it can identify the caliber and type of weapon that is being fired.”
Sciarretta adds, "A leader can use the information that this system provides to react tactically to enemy shooters in ways that limit the number of friendly force and non-combatant casualties. The ISIS system could be easily developed into an operational war-fighting system.”
When a high-powered rifle is fired, it produces two different kinds of sound waves. One is the "muzzle blast” that expands outward in a spherical wave from the muzzle. The second is a conical shock wave that is produced by the bullet as it travels at supersonic speeds. Each node of the shooter location system contains an array of four sensitive microphones. If at least three of the microphones in a single node detect the muzzle blast, the information allows the nodes' microprocessor to calculate the direction that the sound came from. If the same array also detects the arrival time and angle of the bullet shockwave, a simple calculation gives the shooter's location.
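As a rough illustration of the single-node direction estimate described above, here is a minimal C sketch. It assumes an idealized square array (one microphone on each of the +x, -x, +y and -y axes, all a distance d from the helmet center) and a far-field plane wave, so the opposite-microphone time differences are proportional to cos(theta) and sin(theta) and the common factor 2d/c cancels in atan2. The geometry and names are assumptions for illustration only, not the actual ISIS design.

#include <math.h>
#include <stdio.h>

/* Azimuth (degrees) from the time differences measured between the two
   opposite microphone pairs of an idealized square array (assumed geometry). */
static double bearing_deg_from_tdoa(double dt_x, double dt_y)
{
    return atan2(dt_y, dt_x) * 180.0 / 3.14159265358979323846;
}

int main(void)
{
    /* Equal positive delays on both axes put the source at 45 degrees. */
    printf("bearing = %.1f deg\n", bearing_deg_from_tdoa(1.0e-4, 1.0e-4));
    return 0;
}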
"Because the microphones on the helmet are so close together, the precision is not very high,” Ledeczi says. "However, the nodes are continuously exchanging the times and angles of arrival for these acoustic signals, along with their own locations and orientations. When two or more nodes detect the shot, they can provide the bearing with better than one degree accuracy. The range is typically within a few meters even from as far as 300 meters. The more sensors that pick up the shot, the more accurate the localization.”
The ISIS system communicates its findings with the personal digital assistants that the soldiers carry. The PDAs are loaded with maps or overhead pictures of the area upon which the shooter locations are displayed.
In 2006, a team from the National Institute of Standards and Technology at the U.S. Army Aberdeen Test Center independently determined the accuracy of the system. Firing positions were located at distances of 50 to 300 meters from a 10-node sensor network. Six different weapons were used. The only shots that the system sometimes failed to track accurately were those that passed to one side of all of the nodes.
The field tests demonstrated that the system can pick out the location of high-powered sniper rifles even when they are firing at the same time as an assault rifle like the AK-47. They also proved that it can identify the window that a rifle is firing through even when the rifle is completely inside the building, the technique preferred by trained snipers.
These tests were performed with sensors in fixed locations. One of the problems with using a mobile network has been keeping track of the positions of the mobile nodes with sufficient precision. Standard GPS locations are inadequate for this purpose and satellite coverage can be spotty in urban environments. The ISIS team has recently solved this problem by adding an inexpensive radio chip that allows them to track the relative position of nodes using high-precision radio interferometry. The university has applied for a patent on the technique.
The ISIS shooter system uses wireless nodes invented at UC Berkeley and produced by Crossbow Technology Inc. of San Jose, Calif. These smart nodes, or motes, form self-organizing wireless-sensor networks and are the realization of the Pentagon's "smart-dust” concept of radically reducing the size and cost of sensor networks for military applications. Current commercial shooter location systems are extremely expensive, with prices ranging from $10,000 to $50,000 per unit. By contrast, an entire node for the ISIS system weighs only slightly more than the four AA batteries that power it and costs about $1,000 to construct using currently available commercial hardware.
Scientists Found The Edge of Space
Where does space begin? Scientists at the University of Calgary have created a new instrument that is able to track the transition between the relatively gentle winds of Earth's atmosphere and the more violent flows of charged particles in space—flows that can reach speeds well over 1,000 km/hr. And they have accomplished this in unprecedented detail.
Data received from the U of C-designed instrument sent to space on a NASA launch from Alaska about two years ago was able to help pinpoint the so-called edge of space: the boundary between the Earth's atmosphere and outer space.
With that data, U of C scientists confirmed that space begins 118 km above Earth and the results were published this week in the Journal of Geophysical Research.
The instrument—called the Supra-Thermal Ion Imager—was carried by the JOULE-II rocket on Jan. 19, 2007. It travelled to an altitude of about 200 km above sea level and collected data for the five minutes it was moving through the "edge of space."
The Canadian Space Agency invested $422,000 in the development of the Supra-Thermal Ion Imager instrument on JOULE-II.
The ability to gather data in that area is significant because it's very difficult to make measurements in this region, which is too high for balloons and too low for satellites.
"It's only the second time that direct measurements of charged particle flows have been made in this region, and the first time all the ingredients—such as the upper atmospheric winds—have been included," says David Knudsen, associate professor in the Department of Physics and Astronomy at the University of Calgary.
Knudsen and his former PhD student Laureline Sangalli are the lead authors of the paper. Co-authors include: JOULE-II lead scientist Miguel Larsen of Clemson University, Robert Pfaff and Douglas Rowland of NASA Goddard Space Flight Center and T. Zhan of Conseco Inc.
"When you drag a heavy object over a surface, the interface becomes hot. In JOULE-II we were able to measure directly two regions being dragged past each other, one being the ionosphere—being driven by flows in space—and the other the earth's atmosphere," says Knudsen, who also is the head of the Space Physics Division of the Institute for Space Imaging Sciences (ISIS). The institute is a research partnership between the University of Calgary and University of Lethbridge.
The measurements confirmed what other scientists consider the boundary or edge of space.
"The results have given us a closer look at space, which is a benefit to pure research in space science," Knudsen says. "But it also allows us to calculate energy flows into the Earth's atmosphere that ultimately may be able to help us understand the interaction between space and our environment. That could mean a greater understanding of the link between sunspots and the warming and cooling of the Earth's climate as well as how space weather impacts satellites, communications, navigation, and power systems."
The U of C-designed instrument has been adopted by COM DEV, an Ontario-based global designer and manufacturer of space hardware, and is being used as a prototype for three instruments currently being readied to fly on the European Space Agency's "Swarm" satellite mission, set to launch late next year and to collect data for four years. The JOULE-II instrument is one in a long list of more than a dozen instruments designed by U of C scientists in the past forty years which have flown in space. There are at least five more being readied to go on missions in the next two years.
"Understanding the boundary between the Earth's atmosphere and outer space is fundamental to the bigger picture of the effects of space on the Earth's climate and environment," says Russ Taylor, the director of ISIS and head of the Department of Physics and Astronomy at the U of C. "This detection is part of a long history of success by ISIS researchers in designing and building innovative instruments flown on rockets and satellites to image the flow of matter and energy between the Earth and Space."
The paper "Rocket-based measurements of ion velocity, neutral wind, and electric field in the collisional transition region of the auroral ionosphere" was published this week in the Journal of Geophysical Research.